From eb2da9674524db0456b21bfabd3fc05d843f061d Mon Sep 17 00:00:00 2001
From: Bret Comnes
Date: Sat, 29 Feb 2020 16:55:27 -0700
Subject: [PATCH] feat: use ncc for building

---
 dist/index.js | 10890 ++++++++++++++
 dist/index.js.map | 1 +
 dist/sourcemap-register.js | 3933 ++++++
 node_modules/@actions/core/README.md | 140 -
 node_modules/@actions/core/lib/command.d.ts | 16 -
 node_modules/@actions/core/lib/command.js | 78 -
 node_modules/@actions/core/lib/command.js.map | 1 -
 node_modules/@actions/core/lib/core.d.ts | 112 -
 node_modules/@actions/core/lib/core.js | 202 -
 node_modules/@actions/core/lib/core.js.map | 1 -
 node_modules/@actions/core/package.json | 66 -
 .../.github/workflows/tests.yml | 25 -
 .../async-folder-walker/.vscode/launch.json | 22 -
 node_modules/async-folder-walker/CHANGELOG.md | 19 -
 .../async-folder-walker/CODE_OF_CONDUCT.md | 4 -
 .../async-folder-walker/CONTRIBUTING.md | 11 -
 node_modules/async-folder-walker/LICENSE | 21 -
 node_modules/async-folder-walker/README.md | 170 -
 .../fixtures/another-folder/test.json | 1 -
 .../fixtures/sub-folder/sub-folder-file.json | 3 -
 .../sub-sub-folder/sub-sub-folder-file.json | 3 -
 .../async-folder-walker/fixtures/test.json | 3 -
 node_modules/async-folder-walker/index.js | 179 -
 node_modules/async-folder-walker/package.json | 66 -
 node_modules/async-folder-walker/test.js | 102 -
 node_modules/async-neocities/CHANGELOG.md | 128 -
 .../async-neocities/CODE_OF_CONDUCT.md | 4 -
 node_modules/async-neocities/CONTRIBUTING.md | 26 -
 node_modules/async-neocities/LICENSE | 21 -
 node_modules/async-neocities/README.md | 245 -
 node_modules/async-neocities/index.js | 303 -
 .../async-neocities/lib/folder-diff.js | 179 -
 .../async-neocities/lib/folder-diff.test.js | 65 -
 .../async-neocities/lib/stats-handler.js | 60 -
 .../async-neocities/lib/stream-meter.js | 38 -
 node_modules/async-neocities/lib/timer.js | 34 -
 node_modules/async-neocities/package.json | 84 -
 node_modules/async-neocities/test.js | 127 -
 node_modules/asynckit/LICENSE | 21 -
 node_modules/asynckit/README.md | 233 -
 node_modules/asynckit/bench.js | 76 -
 node_modules/asynckit/index.js | 6 -
 node_modules/asynckit/lib/abort.js | 29 -
 node_modules/asynckit/lib/async.js | 34 -
 node_modules/asynckit/lib/defer.js | 26 -
 node_modules/asynckit/lib/iterate.js | 75 -
 .../asynckit/lib/readable_asynckit.js | 91 -
 .../asynckit/lib/readable_parallel.js | 25 -
 node_modules/asynckit/lib/readable_serial.js | 25 -
 .../asynckit/lib/readable_serial_ordered.js | 29 -
 node_modules/asynckit/lib/state.js | 37 -
 node_modules/asynckit/lib/streamify.js | 141 -
 node_modules/asynckit/lib/terminator.js | 29 -
 node_modules/asynckit/package.json | 92 -
 node_modules/asynckit/parallel.js | 43 -
 node_modules/asynckit/serial.js | 17 -
 node_modules/asynckit/serialOrdered.js | 75 -
 node_modules/asynckit/stream.js | 21 -
 node_modules/combined-stream/License | 19 -
 node_modules/combined-stream/Readme.md | 138 -
 .../combined-stream/lib/combined_stream.js | 208 -
 node_modules/combined-stream/package.json | 59 -
 node_modules/combined-stream/yarn.lock | 17 -
 node_modules/delayed-stream/.npmignore | 1 -
 node_modules/delayed-stream/License | 19 -
 node_modules/delayed-stream/Makefile | 7 -
 node_modules/delayed-stream/Readme.md | 141 -
 .../delayed-stream/lib/delayed_stream.js | 107 -
 node_modules/delayed-stream/package.json | 62 -
 node_modules/duplexify/.travis.yml | 6 -
 node_modules/duplexify/LICENSE | 21 -
 node_modules/duplexify/README.md | 97 -
 node_modules/duplexify/example.js | 21 -
 node_modules/duplexify/index.js | 238 -
 node_modules/duplexify/package.json | 66 -
 node_modules/duplexify/test.js | 338 -
 node_modules/end-of-stream/LICENSE | 21 -
 node_modules/end-of-stream/README.md | 54 -
 node_modules/end-of-stream/index.js | 94 -
 node_modules/end-of-stream/package.json | 66 -
 node_modules/fast-fifo/LICENSE | 21 -
 node_modules/fast-fifo/README.md | 66 -
 node_modules/fast-fifo/bench.js | 34 -
 node_modules/fast-fifo/fixed-size.js | 29 -
 node_modules/fast-fifo/index.js | 32 -
 node_modules/fast-fifo/package.json | 52 -
 node_modules/fast-fifo/test.js | 38 -
 .../fetch-errors/.github/workflows/tests.yml | 25 -
 node_modules/fetch-errors/CHANGELOG.md | 15 -
 node_modules/fetch-errors/CODE_OF_CONDUCT.md | 4 -
 node_modules/fetch-errors/CONTRIBUTING.md | 11 -
 node_modules/fetch-errors/LICENSE | 21 -
 node_modules/fetch-errors/README.md | 100 -
 node_modules/fetch-errors/index.js | 45 -
 node_modules/fetch-errors/package.json | 66 -
 node_modules/fetch-errors/test.js | 18 -
 node_modules/form-data/License | 19 -
 node_modules/form-data/README.md | 351 -
 node_modules/form-data/README.md.bak | 351 -
 node_modules/form-data/index.d.ts | 61 -
 node_modules/form-data/lib/browser.js | 2 -
 node_modules/form-data/lib/form_data.js | 494 -
 node_modules/form-data/lib/populate.js | 10 -
 node_modules/form-data/package.json | 101 -
 node_modules/inherits/LICENSE | 16 -
 node_modules/inherits/README.md | 42 -
 node_modules/inherits/inherits.js | 9 -
 node_modules/inherits/inherits_browser.js | 27 -
 node_modules/inherits/package.json | 106 -
 node_modules/mime-db/HISTORY.md | 439 -
 node_modules/mime-db/LICENSE | 22 -
 node_modules/mime-db/README.md | 100 -
 node_modules/mime-db/db.json | 8060 ------------
 node_modules/mime-db/index.js | 11 -
 node_modules/mime-db/package.json | 101 -
 node_modules/mime-types/HISTORY.md | 325 -
 node_modules/mime-types/LICENSE | 23 -
 node_modules/mime-types/README.md | 113 -
 node_modules/mime-types/index.js | 188 -
 node_modules/mime-types/package.json | 88 -
 node_modules/ms/index.js | 162 -
 node_modules/ms/license.md | 21 -
 node_modules/ms/package.json | 72 -
 node_modules/ms/readme.md | 60 -
 node_modules/nanoassert/.travis.yml | 18 -
 node_modules/nanoassert/LICENSE | 13 -
 node_modules/nanoassert/README.md | 44 -
 node_modules/nanoassert/example.js | 5 -
 node_modules/nanoassert/index.js | 18 -
 node_modules/nanoassert/package.json | 61 -
 node_modules/nanoassert/test.js | 27 -
 node_modules/node-fetch/CHANGELOG.md | 266 -
 node_modules/node-fetch/LICENSE.md | 22 -
 node_modules/node-fetch/README.md | 583 -
 node_modules/node-fetch/browser.js | 23 -
 node_modules/node-fetch/lib/index.es.js | 1633 ---
 node_modules/node-fetch/lib/index.js | 1642 ---
 node_modules/node-fetch/lib/index.mjs | 1631 ---
 node_modules/node-fetch/package.json | 94 -
 node_modules/once/LICENSE | 15 -
 node_modules/once/README.md | 79 -
 node_modules/once/once.js | 42 -
 node_modules/once/package.json | 70 -
 node_modules/pretty-bytes/index.d.ts | 66 -
 node_modules/pretty-bytes/index.js | 76 -
 node_modules/pretty-bytes/license | 9 -
 node_modules/pretty-bytes/package.json | 76 -
 node_modules/pretty-bytes/readme.md | 89 -
 node_modules/pump/.travis.yml | 5 -
 node_modules/pump/LICENSE | 21 -
 node_modules/pump/README.md | 65 -
 node_modules/pump/index.js | 82 -
 node_modules/pump/package.json | 60 -
 node_modules/pump/test-browser.js | 66 -
 node_modules/pump/test-node.js | 53 -
 node_modules/pumpify/.travis.yml | 7 -
 node_modules/pumpify/LICENSE | 21 -
 node_modules/pumpify/README.md | 56 -
 node_modules/pumpify/index.js | 60 -
 node_modules/pumpify/package.json | 64 -
 node_modules/pumpify/test.js | 238 -
 node_modules/qs/.editorconfig | 33 -
 node_modules/qs/.eslintignore | 1 -
 node_modules/qs/.eslintrc | 21 -
 node_modules/qs/.github/FUNDING.yml | 12 -
 node_modules/qs/CHANGELOG.md | 285 -
 node_modules/qs/LICENSE.md | 29 -
 node_modules/qs/README.md | 598 -
 node_modules/qs/dist/qs.js | 812 --
 node_modules/qs/lib/formats.js | 26 -
 node_modules/qs/lib/index.js | 11 -
 node_modules/qs/lib/parse.js | 248 -
 node_modules/qs/lib/stringify.js | 279 -
 node_modules/qs/lib/utils.js | 236 -
 node_modules/qs/package.json | 91 -
 node_modules/qs/test/.eslintrc | 18 -
 node_modules/qs/test/index.js | 7 -
 node_modules/qs/test/parse.js | 758 --
 node_modules/qs/test/stringify.js | 738 --
 node_modules/qs/test/utils.js | 136 -
 node_modules/readable-stream/CONTRIBUTING.md | 38 -
 node_modules/readable-stream/GOVERNANCE.md | 136 -
 node_modules/readable-stream/LICENSE | 47 -
 node_modules/readable-stream/README.md | 106 -
 .../readable-stream/errors-browser.js | 127 -
 node_modules/readable-stream/errors.js | 116 -
 .../readable-stream/experimentalWarning.js | 17 -
 .../readable-stream/lib/_stream_duplex.js | 139 -
 .../lib/_stream_passthrough.js | 39 -
 .../readable-stream/lib/_stream_readable.js | 1124 --
 .../readable-stream/lib/_stream_transform.js | 201 -
 .../readable-stream/lib/_stream_writable.js | 697 -
 .../lib/internal/streams/async_iterator.js | 207 -
 .../lib/internal/streams/buffer_list.js | 210 -
 .../lib/internal/streams/destroy.js | 105 -
 .../lib/internal/streams/end-of-stream.js | 104 -
 .../lib/internal/streams/from-browser.js | 3 -
 .../lib/internal/streams/from.js | 64 -
 .../lib/internal/streams/pipeline.js | 97 -
 .../lib/internal/streams/state.js | 27 -
 .../lib/internal/streams/stream-browser.js | 1 -
 .../lib/internal/streams/stream.js | 1 -
 node_modules/readable-stream/package.json | 99 -
 .../readable-stream/readable-browser.js | 9 -
 node_modules/readable-stream/readable.js | 16 -
 node_modules/safe-buffer/LICENSE | 21 -
 node_modules/safe-buffer/README.md | 586 -
 node_modules/safe-buffer/index.d.ts | 187 -
 node_modules/safe-buffer/index.js | 64 -
 node_modules/safe-buffer/package.json | 81 -
 node_modules/stream-shift/.travis.yml | 6 -
 node_modules/stream-shift/LICENSE | 21 -
 node_modules/stream-shift/README.md | 25 -
 node_modules/stream-shift/index.js | 20 -
 node_modules/stream-shift/package.json | 53 -
 node_modules/stream-shift/test.js | 48 -
 node_modules/streamx/.travis.yml | 8 -
 node_modules/streamx/LICENSE | 21 -
 node_modules/streamx/README.md | 366 -
 node_modules/streamx/examples/fs.js | 73 -
 node_modules/streamx/index.js | 855 --
 node_modules/streamx/package.json | 56 -
 node_modules/streamx/test/async-iterator.js | 21 -
 node_modules/streamx/test/backpressure.js | 105 -
 node_modules/streamx/test/compat.js | 178 -
 node_modules/streamx/test/pipe.js | 134 -
 node_modules/streamx/test/readable.js | 119 -
 node_modules/streamx/test/writable.js | 103 -
 node_modules/string_decoder/LICENSE | 48 -
 node_modules/string_decoder/README.md | 47 -
 .../string_decoder/lib/string_decoder.js | 296 -
 node_modules/string_decoder/package.json | 63 -
 node_modules/util-deprecate/History.md | 16 -
 node_modules/util-deprecate/LICENSE | 24 -
 node_modules/util-deprecate/README.md | 53 -
 node_modules/util-deprecate/browser.js | 67 -
 node_modules/util-deprecate/node.js | 6 -
 node_modules/util-deprecate/package.json | 69 -
 node_modules/wrappy/LICENSE | 15 -
 node_modules/wrappy/README.md | 36 -
 node_modules/wrappy/package.json | 60 -
 node_modules/wrappy/wrappy.js | 33 -
 package.json | 8 +-
 243 files
changed, 14829 insertions(+), 36891 deletions(-) create mode 100644 dist/index.js create mode 100644 dist/index.js.map create mode 100644 dist/sourcemap-register.js delete mode 100644 node_modules/@actions/core/README.md delete mode 100644 node_modules/@actions/core/lib/command.d.ts delete mode 100644 node_modules/@actions/core/lib/command.js delete mode 100644 node_modules/@actions/core/lib/command.js.map delete mode 100644 node_modules/@actions/core/lib/core.d.ts delete mode 100644 node_modules/@actions/core/lib/core.js delete mode 100644 node_modules/@actions/core/lib/core.js.map delete mode 100644 node_modules/@actions/core/package.json delete mode 100644 node_modules/async-folder-walker/.github/workflows/tests.yml delete mode 100644 node_modules/async-folder-walker/.vscode/launch.json delete mode 100644 node_modules/async-folder-walker/CHANGELOG.md delete mode 100644 node_modules/async-folder-walker/CODE_OF_CONDUCT.md delete mode 100644 node_modules/async-folder-walker/CONTRIBUTING.md delete mode 100644 node_modules/async-folder-walker/LICENSE delete mode 100644 node_modules/async-folder-walker/README.md delete mode 100644 node_modules/async-folder-walker/fixtures/another-folder/test.json delete mode 100644 node_modules/async-folder-walker/fixtures/sub-folder/sub-folder-file.json delete mode 100644 node_modules/async-folder-walker/fixtures/sub-folder/sub-sub-folder/sub-sub-folder-file.json delete mode 100644 node_modules/async-folder-walker/fixtures/test.json delete mode 100644 node_modules/async-folder-walker/index.js delete mode 100644 node_modules/async-folder-walker/package.json delete mode 100644 node_modules/async-folder-walker/test.js delete mode 100644 node_modules/async-neocities/CHANGELOG.md delete mode 100644 node_modules/async-neocities/CODE_OF_CONDUCT.md delete mode 100644 node_modules/async-neocities/CONTRIBUTING.md delete mode 100644 node_modules/async-neocities/LICENSE delete mode 100644 node_modules/async-neocities/README.md delete mode 100644 node_modules/async-neocities/index.js delete mode 100644 node_modules/async-neocities/lib/folder-diff.js delete mode 100644 node_modules/async-neocities/lib/folder-diff.test.js delete mode 100644 node_modules/async-neocities/lib/stats-handler.js delete mode 100644 node_modules/async-neocities/lib/stream-meter.js delete mode 100644 node_modules/async-neocities/lib/timer.js delete mode 100644 node_modules/async-neocities/package.json delete mode 100644 node_modules/async-neocities/test.js delete mode 100644 node_modules/asynckit/LICENSE delete mode 100644 node_modules/asynckit/README.md delete mode 100644 node_modules/asynckit/bench.js delete mode 100644 node_modules/asynckit/index.js delete mode 100644 node_modules/asynckit/lib/abort.js delete mode 100644 node_modules/asynckit/lib/async.js delete mode 100644 node_modules/asynckit/lib/defer.js delete mode 100644 node_modules/asynckit/lib/iterate.js delete mode 100644 node_modules/asynckit/lib/readable_asynckit.js delete mode 100644 node_modules/asynckit/lib/readable_parallel.js delete mode 100644 node_modules/asynckit/lib/readable_serial.js delete mode 100644 node_modules/asynckit/lib/readable_serial_ordered.js delete mode 100644 node_modules/asynckit/lib/state.js delete mode 100644 node_modules/asynckit/lib/streamify.js delete mode 100644 node_modules/asynckit/lib/terminator.js delete mode 100644 node_modules/asynckit/package.json delete mode 100644 node_modules/asynckit/parallel.js delete mode 100644 node_modules/asynckit/serial.js delete mode 100644 
node_modules/asynckit/serialOrdered.js delete mode 100644 node_modules/asynckit/stream.js delete mode 100644 node_modules/combined-stream/License delete mode 100644 node_modules/combined-stream/Readme.md delete mode 100644 node_modules/combined-stream/lib/combined_stream.js delete mode 100644 node_modules/combined-stream/package.json delete mode 100644 node_modules/combined-stream/yarn.lock delete mode 100644 node_modules/delayed-stream/.npmignore delete mode 100644 node_modules/delayed-stream/License delete mode 100644 node_modules/delayed-stream/Makefile delete mode 100644 node_modules/delayed-stream/Readme.md delete mode 100644 node_modules/delayed-stream/lib/delayed_stream.js delete mode 100644 node_modules/delayed-stream/package.json delete mode 100644 node_modules/duplexify/.travis.yml delete mode 100644 node_modules/duplexify/LICENSE delete mode 100644 node_modules/duplexify/README.md delete mode 100644 node_modules/duplexify/example.js delete mode 100644 node_modules/duplexify/index.js delete mode 100644 node_modules/duplexify/package.json delete mode 100644 node_modules/duplexify/test.js delete mode 100644 node_modules/end-of-stream/LICENSE delete mode 100644 node_modules/end-of-stream/README.md delete mode 100644 node_modules/end-of-stream/index.js delete mode 100644 node_modules/end-of-stream/package.json delete mode 100644 node_modules/fast-fifo/LICENSE delete mode 100644 node_modules/fast-fifo/README.md delete mode 100644 node_modules/fast-fifo/bench.js delete mode 100644 node_modules/fast-fifo/fixed-size.js delete mode 100644 node_modules/fast-fifo/index.js delete mode 100644 node_modules/fast-fifo/package.json delete mode 100644 node_modules/fast-fifo/test.js delete mode 100644 node_modules/fetch-errors/.github/workflows/tests.yml delete mode 100644 node_modules/fetch-errors/CHANGELOG.md delete mode 100644 node_modules/fetch-errors/CODE_OF_CONDUCT.md delete mode 100644 node_modules/fetch-errors/CONTRIBUTING.md delete mode 100644 node_modules/fetch-errors/LICENSE delete mode 100644 node_modules/fetch-errors/README.md delete mode 100644 node_modules/fetch-errors/index.js delete mode 100644 node_modules/fetch-errors/package.json delete mode 100644 node_modules/fetch-errors/test.js delete mode 100644 node_modules/form-data/License delete mode 100644 node_modules/form-data/README.md delete mode 100644 node_modules/form-data/README.md.bak delete mode 100644 node_modules/form-data/index.d.ts delete mode 100644 node_modules/form-data/lib/browser.js delete mode 100644 node_modules/form-data/lib/form_data.js delete mode 100644 node_modules/form-data/lib/populate.js delete mode 100644 node_modules/form-data/package.json delete mode 100644 node_modules/inherits/LICENSE delete mode 100644 node_modules/inherits/README.md delete mode 100644 node_modules/inherits/inherits.js delete mode 100644 node_modules/inherits/inherits_browser.js delete mode 100644 node_modules/inherits/package.json delete mode 100644 node_modules/mime-db/HISTORY.md delete mode 100644 node_modules/mime-db/LICENSE delete mode 100644 node_modules/mime-db/README.md delete mode 100644 node_modules/mime-db/db.json delete mode 100644 node_modules/mime-db/index.js delete mode 100644 node_modules/mime-db/package.json delete mode 100644 node_modules/mime-types/HISTORY.md delete mode 100644 node_modules/mime-types/LICENSE delete mode 100644 node_modules/mime-types/README.md delete mode 100644 node_modules/mime-types/index.js delete mode 100644 node_modules/mime-types/package.json delete mode 100644 node_modules/ms/index.js 
delete mode 100644 node_modules/ms/license.md delete mode 100644 node_modules/ms/package.json delete mode 100644 node_modules/ms/readme.md delete mode 100644 node_modules/nanoassert/.travis.yml delete mode 100644 node_modules/nanoassert/LICENSE delete mode 100644 node_modules/nanoassert/README.md delete mode 100644 node_modules/nanoassert/example.js delete mode 100644 node_modules/nanoassert/index.js delete mode 100644 node_modules/nanoassert/package.json delete mode 100644 node_modules/nanoassert/test.js delete mode 100644 node_modules/node-fetch/CHANGELOG.md delete mode 100644 node_modules/node-fetch/LICENSE.md delete mode 100644 node_modules/node-fetch/README.md delete mode 100644 node_modules/node-fetch/browser.js delete mode 100644 node_modules/node-fetch/lib/index.es.js delete mode 100644 node_modules/node-fetch/lib/index.js delete mode 100644 node_modules/node-fetch/lib/index.mjs delete mode 100644 node_modules/node-fetch/package.json delete mode 100644 node_modules/once/LICENSE delete mode 100644 node_modules/once/README.md delete mode 100644 node_modules/once/once.js delete mode 100644 node_modules/once/package.json delete mode 100644 node_modules/pretty-bytes/index.d.ts delete mode 100644 node_modules/pretty-bytes/index.js delete mode 100644 node_modules/pretty-bytes/license delete mode 100644 node_modules/pretty-bytes/package.json delete mode 100644 node_modules/pretty-bytes/readme.md delete mode 100644 node_modules/pump/.travis.yml delete mode 100644 node_modules/pump/LICENSE delete mode 100644 node_modules/pump/README.md delete mode 100644 node_modules/pump/index.js delete mode 100644 node_modules/pump/package.json delete mode 100644 node_modules/pump/test-browser.js delete mode 100644 node_modules/pump/test-node.js delete mode 100644 node_modules/pumpify/.travis.yml delete mode 100644 node_modules/pumpify/LICENSE delete mode 100644 node_modules/pumpify/README.md delete mode 100644 node_modules/pumpify/index.js delete mode 100644 node_modules/pumpify/package.json delete mode 100644 node_modules/pumpify/test.js delete mode 100644 node_modules/qs/.editorconfig delete mode 100644 node_modules/qs/.eslintignore delete mode 100644 node_modules/qs/.eslintrc delete mode 100644 node_modules/qs/.github/FUNDING.yml delete mode 100644 node_modules/qs/CHANGELOG.md delete mode 100644 node_modules/qs/LICENSE.md delete mode 100644 node_modules/qs/README.md delete mode 100644 node_modules/qs/dist/qs.js delete mode 100644 node_modules/qs/lib/formats.js delete mode 100644 node_modules/qs/lib/index.js delete mode 100644 node_modules/qs/lib/parse.js delete mode 100644 node_modules/qs/lib/stringify.js delete mode 100644 node_modules/qs/lib/utils.js delete mode 100644 node_modules/qs/package.json delete mode 100644 node_modules/qs/test/.eslintrc delete mode 100644 node_modules/qs/test/index.js delete mode 100644 node_modules/qs/test/parse.js delete mode 100644 node_modules/qs/test/stringify.js delete mode 100644 node_modules/qs/test/utils.js delete mode 100644 node_modules/readable-stream/CONTRIBUTING.md delete mode 100644 node_modules/readable-stream/GOVERNANCE.md delete mode 100644 node_modules/readable-stream/LICENSE delete mode 100644 node_modules/readable-stream/README.md delete mode 100644 node_modules/readable-stream/errors-browser.js delete mode 100644 node_modules/readable-stream/errors.js delete mode 100644 node_modules/readable-stream/experimentalWarning.js delete mode 100644 node_modules/readable-stream/lib/_stream_duplex.js delete mode 100644 
node_modules/readable-stream/lib/_stream_passthrough.js delete mode 100644 node_modules/readable-stream/lib/_stream_readable.js delete mode 100644 node_modules/readable-stream/lib/_stream_transform.js delete mode 100644 node_modules/readable-stream/lib/_stream_writable.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/async_iterator.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/buffer_list.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/destroy.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/end-of-stream.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/from-browser.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/from.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/pipeline.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/state.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/stream-browser.js delete mode 100644 node_modules/readable-stream/lib/internal/streams/stream.js delete mode 100644 node_modules/readable-stream/package.json delete mode 100644 node_modules/readable-stream/readable-browser.js delete mode 100644 node_modules/readable-stream/readable.js delete mode 100644 node_modules/safe-buffer/LICENSE delete mode 100644 node_modules/safe-buffer/README.md delete mode 100644 node_modules/safe-buffer/index.d.ts delete mode 100644 node_modules/safe-buffer/index.js delete mode 100644 node_modules/safe-buffer/package.json delete mode 100644 node_modules/stream-shift/.travis.yml delete mode 100644 node_modules/stream-shift/LICENSE delete mode 100644 node_modules/stream-shift/README.md delete mode 100644 node_modules/stream-shift/index.js delete mode 100644 node_modules/stream-shift/package.json delete mode 100644 node_modules/stream-shift/test.js delete mode 100644 node_modules/streamx/.travis.yml delete mode 100644 node_modules/streamx/LICENSE delete mode 100644 node_modules/streamx/README.md delete mode 100644 node_modules/streamx/examples/fs.js delete mode 100644 node_modules/streamx/index.js delete mode 100644 node_modules/streamx/package.json delete mode 100644 node_modules/streamx/test/async-iterator.js delete mode 100644 node_modules/streamx/test/backpressure.js delete mode 100644 node_modules/streamx/test/compat.js delete mode 100644 node_modules/streamx/test/pipe.js delete mode 100644 node_modules/streamx/test/readable.js delete mode 100644 node_modules/streamx/test/writable.js delete mode 100644 node_modules/string_decoder/LICENSE delete mode 100644 node_modules/string_decoder/README.md delete mode 100644 node_modules/string_decoder/lib/string_decoder.js delete mode 100644 node_modules/string_decoder/package.json delete mode 100644 node_modules/util-deprecate/History.md delete mode 100644 node_modules/util-deprecate/LICENSE delete mode 100644 node_modules/util-deprecate/README.md delete mode 100644 node_modules/util-deprecate/browser.js delete mode 100644 node_modules/util-deprecate/node.js delete mode 100644 node_modules/util-deprecate/package.json delete mode 100644 node_modules/wrappy/LICENSE delete mode 100644 node_modules/wrappy/README.md delete mode 100644 node_modules/wrappy/package.json delete mode 100644 node_modules/wrappy/wrappy.js diff --git a/dist/index.js b/dist/index.js new file mode 100644 index 0000000..6d596aa --- /dev/null +++ b/dist/index.js @@ -0,0 +1,10890 @@ +require('./sourcemap-register.js');module.exports = +/******/ (function(modules, runtime) { 
// webpackBootstrap +/******/ "use strict"; +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ __webpack_require__.ab = __dirname + "/"; +/******/ +/******/ // the startup function +/******/ function startup() { +/******/ // Load entry module and return exports +/******/ return __webpack_require__(104); +/******/ }; +/******/ +/******/ // run startup +/******/ return startup(); +/******/ }) +/************************************************************************/ +/******/ ({ + +/***/ 9: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var once = __webpack_require__(49); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 11: +/***/ (function(module) { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. 
+module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + +/***/ }), + +/***/ 13: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var replace = String.prototype.replace; +var percentTwenties = /%20/g; + +var util = __webpack_require__(581); + +var Format = { + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; + +module.exports = util.assign( + { + 'default': Format.RFC3986, + formatters: { + RFC1738: function (value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function (value) { + return String(value); + } + } + }, + Format +); + + +/***/ }), + +/***/ 18: +/***/ (function() { + +eval("require")("encoding"); + + +/***/ }), + +/***/ 46: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var _Object$setPrototypeO; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var finished = __webpack_require__(287); + +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); + +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} + +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + + if (resolve !== null) { + var data = iter[kStream].read(); // we defer if data is null + // we can be expecting either 'end' or + // 'error' + + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} + +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} + +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} + +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + + next: function next() { + var _this = this; + + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + + if (error !== null) { + return Promise.reject(error); + } + + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + + + var lastPromise = this[kLastPromise]; + var promise; + + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + + promise = new Promise(this[kHandlePromise]); + } + + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); + +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + // returned by next() and store the error + + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + + iterator[kError] = err; + return; + } + + var resolve = iterator[kLastResolve]; + + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; + +module.exports = createReadableStreamAsyncIterator; + +/***/ }), + +/***/ 49: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var wrappy = __webpack_require__(11) +module.exports = wrappy(once) 
+module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 69: +/***/ (function(module) { + +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; + + +/***/ }), + +/***/ 87: +/***/ (function(module) { + +module.exports = require("os"); + +/***/ }), + +/***/ 91: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var serialOrdered = __webpack_require__(892); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} + + +/***/ }), + +/***/ 98: +/***/ (function(module, __unusedexports, __webpack_require__) { + +const crypto = __webpack_require__(417) +const util = __webpack_require__(669) +const fs = __webpack_require__(747) + +const ppump = util.promisify(__webpack_require__(453)) + +/** + * neocitiesLocalDiff returns an array of files to delete and update and some useful stats. + * @param {Array} neocitiesFiles Array of files returned from the neocities list api. + * @param {Array} localListing Array of files returned by a full data async-folder-walker run. + * @return {Promise} Object of filesToUpload, filesToDelete and filesSkipped. 
+ */ +async function neocitiesLocalDiff (neocitiesFiles, localListing) { + const localIndex = {} + const ncIndex = {} + + const neoCitiesFiltered = neocitiesFiles.filter(f => !f.is_directory) + neoCitiesFiltered.forEach(f => { ncIndex[f.path] = f }) // index + const ncFiles = new Set(neoCitiesFiltered.map(f => f.path)) // shape + + const localListingFiltered = localListing.filter(f => !f.stat.isDirectory()) // files only + localListingFiltered.forEach(f => { localIndex[forceUnixRelname(f.relname)] = f }) // index + const localFiles = new Set(localListingFiltered.map(f => forceUnixRelname(f.relname))) // shape + + const filesToAdd = difference(localFiles, ncFiles) + const filesToDelete = difference(ncFiles, localFiles) + + const maybeUpdate = intersection(localFiles, ncFiles) + const skipped = new Set() + + for (const p of maybeUpdate) { + const local = localIndex[p] + const remote = ncIndex[p] + + if (local.stat.size !== remote.size) { filesToAdd.add(p); continue } + + const localSha1 = await sha1FromPath(local.filepath) + if (localSha1 !== remote.sha1_hash) { filesToAdd.add(p); continue } + + skipped.add(p) + } + + return { + filesToUpload: Array.from(filesToAdd).map(p => ({ + name: forceUnixRelname(localIndex[p].relname), + path: localIndex[p].filepath + })), + filesToDelete: Array.from(filesToDelete).map(p => ncIndex[p].path), + filesSkipped: Array.from(skipped).map(p => localIndex[p]) + } +} + +module.exports = { + neocitiesLocalDiff +} + +/** + * sha1FromPath returns a sha1 hex from a path + * @param {String} p string of the path of the file to hash + * @return {Promise} the hex representation of the sha1 + */ +async function sha1FromPath (p) { + const rs = fs.createReadStream(p) + const hash = crypto.createHash('sha1') + + await ppump(rs, hash) + + return hash.digest('hex') +} + +// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set#Implementing_basic_set_operations + +/** + * difference returnss the difference betwen setA and setB. + * @param {Set} setA LHS set + * @param {Set} setB RHS set + * @return {Set} The difference Set + */ +function difference (setA, setB) { + const _difference = new Set(setA) + for (const elem of setB) { + _difference.delete(elem) + } + return _difference +} + +/** + * intersection returns the interesction between setA and setB. + * @param {Set} setA setA LHS set + * @param {Set} setB setB RHS set + * @return {Set} The intersection set between setA and setB. + */ +function intersection (setA, setB) { + const _intersection = new Set() + for (const elem of setB) { + if (setA.has(elem)) { + _intersection.add(elem) + } + } + return _intersection +} + +/** + * forceUnixRelname forces a OS dependent path to a unix style path. + * @param {String} relname String path to convert to unix style. 
+ * @return {String} The unix variant of the path + */ +function forceUnixRelname (relname) { + return relname.split(relname.sep).join('/') +} + +/** + * Example of neocitiesFiles + */ +// [ +// { +// path: 'img', +// is_directory: true, +// updated_at: 'Thu, 21 Nov 2019 04:06:17 -0000' +// }, +// { +// path: 'index.html', +// is_directory: false, +// size: 1094, +// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', +// sha1_hash: '7f15617e87d83218223662340f4052d9bb9d096d' +// }, +// { +// path: 'neocities.png', +// is_directory: false, +// size: 13232, +// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', +// sha1_hash: 'fd2ee41b1922a39a716cacb88c323d613b0955e4' +// }, +// { +// path: 'not_found.html', +// is_directory: false, +// size: 347, +// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', +// sha1_hash: 'd7f004e9d3b2eaaa8827f741356f1122dc9eb030' +// }, +// { +// path: 'style.css', +// is_directory: false, +// size: 298, +// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', +// sha1_hash: 'e516457acdb0d00710ab62cc257109ef67209ce8' +// } +// ] + +/** + * Example of localListing + */ +// [{ +// root: '/Users/bret/repos/async-folder-walker/fixtures', +// filepath: '/Users/bret/repos/async-folder-walker/fixtures/sub-folder/sub-sub-folder', +// stat: Stats { +// dev: 16777220, +// mode: 16877, +// nlink: 3, +// uid: 501, +// gid: 20, +// rdev: 0, +// blksize: 4096, +// ino: 30244023, +// size: 96, +// blocks: 0, +// atimeMs: 1574381262779.8396, +// mtimeMs: 1574380914743.5474, +// ctimeMs: 1574380914743.5474, +// birthtimeMs: 1574380905232.5996, +// atime: 2019-11-22T00:07:42.780Z, +// mtime: 2019-11-22T00:01:54.744Z, +// ctime: 2019-11-22T00:01:54.744Z, +// birthtime: 2019-11-22T00:01:45.233Z +// }, +// relname: 'sub-folder/sub-sub-folder', +// basename: 'sub-sub-folder' +// }] + + +/***/ }), + +/***/ 104: +/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { + +const core = __webpack_require__(470) +// const github = require('@actions/github') +const Neocities = __webpack_require__(248) +const path = __webpack_require__(622) +const ms = __webpack_require__(317) +const assert = __webpack_require__(487) +const fsp = __webpack_require__(747).promises + +async function doDeploy () { + const token = core.getInput('api_token') + const distDir = path.join(process.cwd(), core.getInput('dist_dir')) + const cleanup = JSON.parse(core.getInput('cleanup')) + + assert(typeof cleanup === 'boolean', 'Cleanup input must be a boolean "true" or "false"') + const stat = await fsp.stat(distDir) + assert(stat.isDirectory(), 'dist_dir must be a directory that exists') + + const client = new Neocities(token) + + const stats = await client.deploy(distDir, { + cleanup, + statsCb: Neocities.statsHandler() + }) + + console.log(`Deployed to Neocities in ${ms(stats.time)}:`) + console.log(` Uploaded ${stats.filesToUpload.length} files`) + console.log(` ${cleanup ? 
'Deleted' : 'Orphaned'} ${stats.filesToDelete.length} files`) + console.log(` Skipped ${stats.filesSkipped.length} files`) +} + +doDeploy().catch(err => { + console.error(err) + core.setFailed(err.message) +}) + + +/***/ }), + +/***/ 108: +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { Writable, Transform } = __webpack_require__(745) +const pump = __webpack_require__(453) +const pumpify = __webpack_require__(746) + +function getStreamLength (readable) { + let length = 0 + + const dummyLoad = new Writable({ + write (data, cb) { + length += data.length + cb(null) + } + }) + + return new Promise((resolve, reject) => { + pump(readable, dummyLoad, (err) => { + if (err) return reject(err) + resolve(length) + }) + }) +} + +function meterStream (readable, statsCb) { + let bytesRead = 0 + const meter = new Transform({ + transform (data, cb) { + bytesRead += data.length + statsCb(bytesRead) + cb(null, data) + } + }) + return pumpify(readable, meter) +} + +module.exports = { + getStreamLength, + meterStream +} + + +/***/ }), + +/***/ 134: +/***/ (function(module, __unusedexports, __webpack_require__) { + +const FixedFIFO = __webpack_require__(740) + +module.exports = class FastFIFO { + constructor (hwm) { + this.hwm = hwm || 16 + this.head = new FixedFIFO(this.hwm) + this.tail = this.head + } + + push (val) { + if (!this.head.push(val)) { + const prev = this.head + this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length) + this.head.push(val) + } + } + + shift () { + const val = this.tail.shift() + if (val === undefined && this.tail.next) { + const next = this.tail.next + this.tail.next = null + this.tail = next + return this.tail.shift() + } + return val + } + + isEmpty () { + return this.head.isEmpty() + } +} + + +/***/ }), + +/***/ 147: +/***/ (function(module) { + +// API +module.exports = state; + +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; +} + + +/***/ }), + +/***/ 149: +/***/ (function(module, exports, __webpack_require__) { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __webpack_require__(293) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), + +/***/ 152: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var Stream = __webpack_require__(413).Stream; +var util = __webpack_require__(669); + +module.exports = DelayedStream; +function DelayedStream() { + this.source = null; + this.dataSize = 0; + this.maxDataSize = 1024 * 1024; + this.pauseStream = true; + + this._maxDataSizeExceeded = false; + this._released = false; + this._bufferedEvents = []; +} +util.inherits(DelayedStream, Stream); + +DelayedStream.create = function(source, options) { + var delayedStream = new this(); + + options = options || {}; + for (var option in options) { + delayedStream[option] = options[option]; + } + + delayedStream.source = source; + + var realEmit = source.emit; + source.emit = function() { + delayedStream._handleEmit(arguments); + return realEmit.apply(source, arguments); + }; + + source.on('error', function() {}); + if (delayedStream.pauseStream) { + source.pause(); + } + + return delayedStream; +}; + +Object.defineProperty(DelayedStream.prototype, 'readable', { + configurable: true, + enumerable: true, + get: function() { + return this.source.readable; + } +}); + +DelayedStream.prototype.setEncoding = function() { + return this.source.setEncoding.apply(this.source, arguments); +}; + +DelayedStream.prototype.resume = function() { + if (!this._released) { + this.release(); + } + + this.source.resume(); +}; + +DelayedStream.prototype.pause = function() { + this.source.pause(); +}; + +DelayedStream.prototype.release = function() { + this._released = true; + + this._bufferedEvents.forEach(function(args) { + this.emit.apply(this, args); + }.bind(this)); + this._bufferedEvents = []; +}; + +DelayedStream.prototype.pipe = function() { + var r = 
Stream.prototype.pipe.apply(this, arguments); + this.resume(); + return r; +}; + +DelayedStream.prototype._handleEmit = function(args) { + if (this._released) { + this.emit.apply(this, args); + return; + } + + if (args[0] === 'data') { + this.dataSize += args[1].length; + this._checkIfMaxDataSizeExceeded(); + } + + this._bufferedEvents.push(args); +}; + +DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { + if (this._maxDataSizeExceeded) { + return; + } + + if (this.dataSize <= this.maxDataSize) { + return; + } + + this._maxDataSizeExceeded = true; + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' + this.emit('error', new Error(message)); +}; + + +/***/ }), + +/***/ 157: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var async = __webpack_require__(751) + , abort = __webpack_require__(566) + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); +} + +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; +} + + +/***/ }), + +/***/ 176: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = 
symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var ERR_INVALID_ARG_TYPE = __webpack_require__(563).codes.ERR_INVALID_ARG_TYPE; + +function from(Readable, iterable, opts) { + var iterator; + + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. + + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + + function next() { + return _next2.apply(this, arguments); + } + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; +} + +module.exports = from; + +/***/ }), + +/***/ 211: +/***/ (function(module) { + +module.exports = require("https"); + +/***/ }), + +/***/ 216: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var ERR_INVALID_OPT_VALUE = __webpack_require__(563).codes.ERR_INVALID_OPT_VALUE; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + + return Math.floor(hwm); + } // Default value + + + return state.objectMode ? 16 : 16 * 1024; +} + +module.exports = { + getHighWaterMark: getHighWaterMark +}; + +/***/ }), + +/***/ 226: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + +module.exports = Readable; +/**/ + +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; +/**/ + +var EE = __webpack_require__(614).EventEmitter; + +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ + + +var Stream = __webpack_require__(427); +/**/ + + +var Buffer = __webpack_require__(293).Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +/**/ + + +var debugUtil = __webpack_require__(669); + +var debug; + +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + + +var BufferList = __webpack_require__(896); + +var destroyImpl = __webpack_require__(232); + +var _require = __webpack_require__(216), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = __webpack_require__(563).codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + + +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; + +__webpack_require__(689)(Readable, Stream); + +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. 
+ + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __webpack_require__(831); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + + this.sync = true; // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + + this.autoDestroy = !!options.autoDestroy; // has it been destroyed + + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s + + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + + this.readingMore = false; + this.decoder = null; + this.encoding = null; + + if (options.encoding) { + if (!StringDecoder) StringDecoder = __webpack_require__(674).StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || __webpack_require__(831); + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); // legacy + + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; + +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; // Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+ + +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; // Unshift should *always* be something directly out of read() + + +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + + + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + + return er; +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; // backwards compatibility. 
+ + +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = __webpack_require__(674).StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); + + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; // Don't raise the hwm > 1GB + + +var MAX_HWM = 0x40000000; + +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + + return n; +} // This function is designed to be inlinable, so please take care when making +// changes to the function body. + + +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } // If we're asking for more than the current hwm, then raise the hwm. + + + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; // Don't have enough + + if (!state.ended) { + state.needReadable = true; + return 0; + } + + return state.length; +} // you can override either this method, or the async _read(n) below. + + +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. + + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + // if we need a readable event, then we need to do some reading. + + + var doRead = state.needReadable; + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + + + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; // if the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true; // call internal read method + + this._read(state.highWaterMark); + + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. + + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + return ret; +}; + +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + + if (state.decoder) { + var chunk = state.decoder.end(); + + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + + state.ended = true; + + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} // Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. + + +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. 
It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + + + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} // at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. + + +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) // didn't get any data, stop spinning. + break; + } + + state.readingMore = false; +} // abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. + + +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + + case 1: + state.pipes = [state.pipes, dest]; + break; + + default: + state.pipes.push(dest); + break; + } + + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + + + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + + function cleanup() { + debug('cleanup'); // cleanup event handlers once the pipe is broken + + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + + src.pause(); + } + } // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + + + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } // Make sure our error handler is attached before userland ones. + + + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. + + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + + dest.once('close', onclose); + + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } // tell the dest that it's being piped to + + + dest.emit('pipe', src); // start the flow if it hasn't been started already. 
+ + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; // if we're not piping anywhere, then do nothing. + + if (state.pipesCount === 0) return this; // just one destination. most common case. + + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; // got a match. + + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } // slow case. multiple pipe destinations. + + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + } + + return this; + } // try to find the right one. + + + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; // set up data events if they are asked for +// Ensure readable listeners eventually get something + + +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; + +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} // pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. + + +Readable.prototype.resume = function () { + var state = this._readableState; + + if (!state.flowing) { + debug('resume'); // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + + state.flowing = !state.readableListening; + resume(this, state); + } + + state.paused = false; + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + debug('resume', state.reading); + + if (!state.reading) { + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + + this._readableState.paused = true; + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + + while (state.flowing && stream.read() !== null) { + ; + } +} // wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. + + +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + + if (!ret) { + paused = true; + stream.pause(); + } + }); // proxy all the other methods. + // important when wrapping filters and duplexes. + + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } // proxy certain important events. 
+ + + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } // when we try to consume some more bytes, simply unpause the + // underlying stream. + + + this._read = function (n) { + debug('wrapped _read', n); + + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __webpack_require__(46); + } + + return createReadableStreamAsyncIterator(this); + }; +} + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); // exposed for testing purposes only. + +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); // Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. + +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
+ + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} + +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __webpack_require__(176); + } + + return from(Readable, iterable, opts); + }; +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + + return -1; +} + +/***/ }), + +/***/ 232: +/***/ (function(module) { + +"use strict"; + // undocumented cb() API, needed for core, not for public API + +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + + return this; + } // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + + if (this._readableState) { + this._readableState.destroyed = true; + } // if this is a duplex stream mark the writable part as destroyed as well + + + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + + return this; +} + +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; + +/***/ }), + +/***/ 238: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + + +var eos; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} + +var _require$codes = __webpack_require__(563).codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; + +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = __webpack_require__(287); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; // request.destroy just do .end - .abort is what we want + + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} + +function call(fn) { + fn(); +} + +function pipe(from, to) { + return from.pipe(to); +} + +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} + +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} + +module.exports = pipeline; + +/***/ }), + +/***/ 241: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + + +module.exports = Writable; +/* */ + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} // It seems a linked list but it is not +// there will be only 2 of these for each stream + + +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ + + +var Duplex; +/**/ + +Writable.WritableState = WritableState; +/**/ + +var internalUtil = { + deprecate: __webpack_require__(917) +}; +/**/ + +/**/ + +var Stream = __webpack_require__(427); +/**/ + + +var Buffer = __webpack_require__(293).Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +var destroyImpl = __webpack_require__(232); + +var _require = __webpack_require__(216), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = __webpack_require__(563).codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; + +var errorOrDestroy = destroyImpl.errorOrDestroy; + +__webpack_require__(689)(Writable, Stream); + +function nop() {} + +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __webpack_require__(831); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. 
+ + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream + // contains buffers or objects. + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + + this.finalCalled = false; // drain event flag. + + this.needDrain = false; // at the start of calling end() + + this.ending = false; // when end() has been called, and returned + + this.ended = false; // when 'finish' is emitted + + this.finished = false; // has it been destroyed + + this.destroyed = false; // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + + this.length = 0; // a flag to see when we're in the middle of a write. + + this.writing = false; // when true all writes will be buffered until .uncork() call + + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + + this.sync = true; // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + + this.onwrite = function (er) { + onwrite(stream, er); + }; // the callback that the user supplies to write(chunk,encoding,cb) + + + this.writecb = null; // the amount that is being written when _write is called. + + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + + this.prefinished = false; // True if the error was already emitted and should not be thrown again + + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. 
+ + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests + + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + + while (current) { + out.push(current); + current = current.next; + } + + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); // Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. + + +var realHasInstance; + +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || __webpack_require__(831); // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); // legacy. + + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} // Otherwise people can pipe Writable streams, which is just wrong. + + +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; + +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb + + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} // Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+ + +function validChunk(stream, state, chunk, cb) { + var er; + + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + + return true; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; + +Writable.prototype.cork = function () { + this._writableState.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); // if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. + +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
+ + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); // this can emit finish, and it will always happen + // after error + + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); // this can emit finish, but finish must + // always follow error + + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} // Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+ + +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} // if there's something in the buffer waiting, then process it + + +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + + state.pendingcb++; + state.lastBufferedRequest = null; + + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks + + if (state.corked) { + state.corked = 1; + this.uncork(); + } // ignore unnecessary end() calls. 
+ + + if (!state.ending) endWritable(this, state, cb); + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + + if (err) { + errorOrDestroy(stream, err); + } + + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} + +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + + if (need) { + prefinish(stream, state); + + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } // reuse the free corkReq. 
+
+
+  state.corkedRequestsFree.next = corkReq;
+}
+
+Object.defineProperty(Writable.prototype, 'destroyed', {
+  // making it explicit this property is not enumerable
+  // because otherwise some prototype manipulation in
+  // userland will fail
+  enumerable: false,
+  get: function get() {
+    if (this._writableState === undefined) {
+      return false;
+    }
+
+    return this._writableState.destroyed;
+  },
+  set: function set(value) {
+    // we ignore the value if the stream
+    // has not been initialized yet
+    if (!this._writableState) {
+      return;
+    } // backward compatibility, the user is explicitly
+    // managing destroyed
+
+
+    this._writableState.destroyed = value;
+  }
+});
+Writable.prototype.destroy = destroyImpl.destroy;
+Writable.prototype._undestroy = destroyImpl.undestroy;
+
+Writable.prototype._destroy = function (err, cb) {
+  cb(err);
+};
+
+/***/ }),
+
+/***/ 248:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const { handleResponse } = __webpack_require__(823)
+const { createReadStream } = __webpack_require__(747)
+const afw = __webpack_require__(722)
+const FormData = __webpack_require__(928)
+const assert = __webpack_require__(487)
+const fetch = __webpack_require__(454)
+const { URL } = __webpack_require__(835)
+const qs = __webpack_require__(386)
+const os = __webpack_require__(87)
+
+const { neocitiesLocalDiff } = __webpack_require__(98)
+const pkg = __webpack_require__(442)
+const SimpleTimer = __webpack_require__(979)
+const { getStreamLength, meterStream } = __webpack_require__(108)
+const statsHandler = __webpack_require__(423)
+
+const defaultURL = 'https://neocities.org'
+
+// Progress API constants
+const START = 'start'
+const PROGRESS = 'progress' // progress updates
+const STOP = 'stop'
+const SKIP = 'skip'
+// Progress stages
+const INSPECTING = 'inspecting'
+const DIFFING = 'diffing'
+const APPLYING = 'applying'
+
+/**
+ * NeocitiesAPIClient class representing a neocities api client.
+ */
+class NeocitiesAPIClient {
+  /**
+   * getKey returns an apiKey from a sitename and password.
+   * @param {String} sitename username/sitename to log into.
+   * @param {String} password password to log in with.
+   * @param {Object} [opts] Options object.
+   * @param {String} [opts.url=https://neocities.org] Base URL to send the request to.
+   * @return {Promise} An API key for the sitename.
+   */
+  static getKey (sitename, password, opts) {
+    assert(sitename, 'must pass sitename as first arg')
+    assert(typeof sitename === 'string', 'sitename arg must be a string')
+    assert(password, 'must pass a password as the second arg')
+    assert(typeof password === 'string', 'password arg must be a string')
+
+    opts = Object.assign({
+      url: defaultURL
+    }, opts)
+
+    const baseURL = opts.url
+    delete opts.url
+
+    const url = new URL('/api/key', baseURL)
+    url.username = sitename
+    url.password = password
+    return fetch(url, opts)
+  }
+
+  static statsHandler (...args) { return statsHandler(...args) }
+
+  /**
+   * Create an async-neocities api client.
+   * @param {string} apiKey An apiKey to make requests with.
+   * @param {Object} [opts] Options object.
+   * @param {String} [opts.url=https://neocities.org] Base URL to make requests to.
+   * @return {Object} An api client instance.
+   */
+  constructor (apiKey, opts) {
+    assert(apiKey, 'must pass apiKey as first argument')
+    assert(typeof apiKey === 'string', 'apiKey must be a string')
+    opts = Object.assign({
+      url: defaultURL
+    }, opts)
+
+    this.url = opts.url
+    this.apiKey = apiKey
+  }
+
+  get defaultHeaders () {
+    return {
+      Authorization: `Bearer ${this.apiKey}`,
+      Accept: 'application/json',
+      'User-Agent': `async-neocities/${pkg.version} (${os.type()})`
+    }
+  }
+
+  /**
+   * Generic GET request to neocities.
+   * @param {String} endpoint The endpoint path to send the GET request to.
+   * @param {Object} [queries] An object that gets added to the request in the form of a query string.
+   * @param {Object} [opts] Options object.
+   * @param {String} [opts.method=GET] The HTTP method to use.
+   * @param {Object} [opts.headers] Headers to include in the request.
+   * @return {Promise} Fetch response promise.
+   */
+  get (endpoint, queries, opts) {
+    assert(endpoint, 'must pass endpoint as first argument')
+    opts = Object.assign({
+      method: 'GET'
+    }, opts)
+    opts.headers = Object.assign({}, this.defaultHeaders, opts.headers)
+
+    let path = `/api/${endpoint}`
+    if (queries) path += `?${qs.stringify(queries)}`
+
+    const url = new URL(path, this.url)
+    return fetch(url, opts)
+  }
+
+  /**
+   * Low level POST request to neocities with FormData.
+   * @param {String} endpoint The endpoint to make the request to.
+   * @param {Array.<{name: String, value: String}>} formEntries Array of form entries.
+   * @param {Object} [opts] Options object.
+   * @param {String} [opts.method=POST] HTTP Method.
+   * @param {Object} [opts.headers] Additional headers to send.
+   * @return {Promise} Fetch response promise.
+   */
+  async post (endpoint, formEntries, opts) {
+    assert(endpoint, 'must pass endpoint as first argument')
+    assert(formEntries, 'must pass formEntries as second argument')
+
+    function createForm () {
+      const form = new FormData()
+      for (const { name, value } of formEntries) {
+        form.append(name, value)
+      }
+
+      return form
+    }
+
+    opts = Object.assign({
+      method: 'POST',
+      statsCb: () => {}
+    }, opts)
+    const statsCb = opts.statsCb
+    delete opts.statsCb
+
+    const stats = {
+      totalBytes: await getStreamLength(createForm()),
+      bytesWritten: 0
+    }
+    statsCb(stats)
+
+    const form = createForm()
+
+    opts.body = meterStream(form, bytesRead => {
+      stats.bytesWritten = bytesRead
+      statsCb(stats)
+    })
+
+    opts.headers = Object.assign(
+      {},
+      this.defaultHeaders,
+      form.getHeaders(),
+      opts.headers)
+
+    const url = new URL(`/api/${endpoint}`, this.url)
+    return fetch(url, opts)
+  }
+
+  /**
+   * Upload files to neocities
+   */
+  upload (files, opts = {}) {
+    opts = {
+      statsCb: () => {},
+      ...opts
+    }
+    const formEntries = files.map(({ name, path }) => {
+      const streamCtor = (next) => next(createReadStream(path))
+      streamCtor.path = path
+      return {
+        name,
+        value: streamCtor
+      }
+    })
+
+    return this.post('upload', formEntries, { statsCb: opts.statsCb }).then(handleResponse)
+  }
+
+  /**
+   * Delete files from your website
+   */
+  delete (filenames, opts = {}) {
+    assert(filenames, 'filenames is a required first argument')
+    assert(Array.isArray(filenames), 'filenames argument must be an array of file paths in your website')
+    opts = {
+      statsCb: () => {},
+      ...opts
+    }
+
+    const formEntries = filenames.map(file => ({
+      name: 'filenames[]',
+      value: file
+    }))
+
+    return this.post('delete', formEntries, { statsCb: opts.statsCb }).then(handleResponse)
+  }
+
+  list (queries) {
+    // queries.path: Path to list
+    return this.get('list', queries).then(handleResponse)
+  }
+
+  /**
+   * info returns info on your site, or optionally on a sitename passed via the query string
+   * @param {Object} queries Querystring arguments to include (e.g. sitename)
+   * @return {Promise} Fetch request promise
+   */
+  info (queries) {
+    // queries.sitename: sitename to get info on
+    return this.get('info', queries).then(handleResponse)
+  }
+
+  /**
+   * Deploy a directory to neocities, skipping already uploaded files and optionally cleaning orphaned files.
+   * @param {String} directory The path of the directory to deploy.
+   * @param {Object} opts Options object.
+   * @param {Boolean} opts.cleanup Whether to delete orphaned remote files or not. Defaults to false.
+   * @param {Function} opts.statsCb Callback that receives stats info while the deploy is in progress.
+   * @return {Promise} Promise containing stats about the deploy
+   */
+  async deploy (directory, opts) {
+    opts = {
+      cleanup: false, // delete remote orphaned files
+      statsCb: () => {},
+      ...opts
+    }
+
+    const statsCb = opts.statsCb
+    const totalTime = new SimpleTimer(Date.now())
+
+    // INSPECTION STAGE
+    statsCb({ stage: INSPECTING, status: START })
+    const [localFiles, remoteFiles] = await Promise.all([
+      afw.allFiles(directory, { shaper: f => f }),
+      this.list().then(res => res.files)
+    ])
+    statsCb({ stage: INSPECTING, status: STOP })
+
+    // DIFFING STAGE
+    statsCb({ stage: DIFFING, status: START })
+    const { filesToUpload, filesToDelete, filesSkipped } = await neocitiesLocalDiff(remoteFiles, localFiles)
+    statsCb({ stage: DIFFING, status: STOP })
+
+    // APPLYING STAGE
+    if (filesToUpload.length === 0 && (!opts.cleanup || filesToDelete.length === 0)) {
+      statsCb({ stage: APPLYING, status: SKIP })
+      return stats()
+    }
+
+    statsCb({ stage: APPLYING, status: START })
+    const work = []
+
+    if (filesToUpload.length > 0) {
+      const uploadJob = this.upload(filesToUpload, {
+        statsCb ({ totalBytes, bytesWritten }) {
+          statsCb({
+            stage: APPLYING,
+            status: PROGRESS,
+            complete: false,
+            totalBytes,
+            bytesWritten,
+            get progress () {
+              return (this.bytesWritten / this.totalBytes) || 0
+            }
+          })
+        }
+      }).then((_) => {
+        statsCb({
+          stage: APPLYING,
+          status: PROGRESS,
+          complete: true,
+          progress: 1.0
+        })
+      })
+      work.push(uploadJob)
+    }
+
+    if (opts.cleanup && filesToDelete.length > 0) {
+      work.push(this.delete(filesToDelete))
+    }
+
+    await Promise.all(work)
+    statsCb({ stage: APPLYING, status: STOP })
+
+    return stats()
+
+    function stats () {
+      totalTime.stop()
+      return {
+        time: totalTime.elapsed,
+        filesToUpload,
+        filesToDelete,
+        filesSkipped
+      }
+    }
+  }
+}
+
+module.exports = NeocitiesAPIClient
+
+
+/***/ }),
+
+/***/ 287:
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+// Ported from https://github.com/mafintosh/end-of-stream with
+// permission from the author, Mathias Buus (@mafintosh).
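+//
+// A minimal usage sketch of the eos() helper defined below (illustrative
+// only; the file name is hypothetical and not part of this bundle):
+//
+//   const fs = require('fs')
+//   eos(fs.createReadStream('example.txt'), function (err) {
+//     if (err) return console.error('stream failed or closed prematurely')
+//     console.log('stream ended cleanly')
+//   })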
+ + +var ERR_STREAM_PREMATURE_CLOSE = __webpack_require__(563).codes.ERR_STREAM_PREMATURE_CLOSE; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + callback.apply(this, args); + }; +} + +function noop() {} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + + var writableEnded = stream._writableState && stream._writableState.finished; + + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + + var readableEnded = stream._readableState && stream._readableState.endEmitted; + + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + + var onerror = function onerror(err) { + callback.call(stream, err); + }; + + var onclose = function onclose() { + var err; + + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} + +module.exports = eos; + +/***/ }), + +/***/ 293: +/***/ (function(module) { + +module.exports = require("buffer"); + +/***/ }), + +/***/ 315: +/***/ (function(module) { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + 
ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + +/***/ }), + +/***/ 317: +/***/ (function(module) { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 
's' : ''); +} + + +/***/ }), + +/***/ 334: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = +{ + parallel : __webpack_require__(424), + serial : __webpack_require__(91), + serialOrdered : __webpack_require__(892) +}; + + +/***/ }), + +/***/ 386: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var stringify = __webpack_require__(897); +var parse = __webpack_require__(755); +var formats = __webpack_require__(13); + +module.exports = { + formats: formats, + parse: parse, + stringify: stringify +}; + + +/***/ }), + +/***/ 394: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var stream = __webpack_require__(574) +var eos = __webpack_require__(9) +var inherits = __webpack_require__(689) +var shift = __webpack_require__(475) + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = 
clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify + + +/***/ }), + +/***/ 413: +/***/ (function(module) { + +module.exports = require("stream"); + +/***/ }), + +/***/ 417: +/***/ (function(module) { + +module.exports = require("crypto"); + +/***/ }), + +/***/ 423: +/***/ (function(module, __unusedexports, __webpack_require__) { + +const prettyBytes = __webpack_require__(589) + +// Progress API constants +const START = 'start' +const PROGRESS = 'progress' // progress updates +const STOP = 'stop' +const SKIP = 'skip' + 
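+// statsHandler returns a logging callback that can be passed as deploy()'s
+// statsCb option. A rough sketch of the stats objects it receives (shape
+// taken from the deploy() implementation; the numbers are only illustrative):
+//
+//   { stage: 'applying', status: 'progress', complete: false,
+//     totalBytes: 2048, bytesWritten: 1024, progress: 0.5 }
+//
+// Start/stop/skip updates only carry { stage, status }.
+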
+function statsHandler (opts = {}) { + let progressInterval + const lastStats = {} + + return (stats) => { + switch (stats.status) { + case START: { + console.log(`Starting ${stats.stage} stage...`) + break + } + case STOP: { + console.log(`Finished ${stats.stage} stage.`) + break + } + case SKIP: { + console.log(`Skipping ${stats.stage} stage.`) + break + } + case PROGRESS: { + progressHandler(stats) + break + } + default: { + } + } + } + + function progressHandler (stats) { + Object.assign(lastStats, stats) + if (!stats.complete && stats.progress < 1) { + if (!progressInterval) { + progressInterval = setInterval(logProgress, 500, lastStats) + logProgress(lastStats) + } + } else { + if (progressInterval) { + clearInterval(progressInterval) + logProgress(lastStats) + } + } + } + + function logProgress (stats) { + let logLine = `Stage ${stats.stage}: ${(stats.progress * 100).toFixed(2)}%` + if (stats.bytesWritten != null && stats.totalBytes != null) { + logLine = logLine + ` (${prettyBytes(stats.bytesWritten)} / ${prettyBytes(stats.totalBytes)})` + } + console.log(logLine) + } +} + +module.exports = statsHandler + + +/***/ }), + +/***/ 424: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var iterate = __webpack_require__(157) + , initState = __webpack_require__(147) + , terminator = __webpack_require__(939) + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} + + +/***/ }), + +/***/ 427: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(413); + + +/***/ }), + +/***/ 431: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __importStar(__webpack_require__(87)); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let 
cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return (s || '') + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return (s || '') + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 442: +/***/ (function(module) { + +module.exports = {"_from":"async-neocities@1.1.6","_id":"async-neocities@1.1.6","_inBundle":false,"_integrity":"sha512-q5fTVttBaN9znGxqxxDAh/ks+bZngIJPu6zPS7nlbJLC9NnOhrcP5Mu0VntxgEBtAuaExyI6uH/C+CxKSW0LeQ==","_location":"/async-neocities","_phantomChildren":{},"_requested":{"type":"version","registry":true,"raw":"async-neocities@1.1.6","name":"async-neocities","escapedName":"async-neocities","rawSpec":"1.1.6","saveSpec":null,"fetchSpec":"1.1.6"},"_requiredBy":["/"],"_resolved":"https://registry.npmjs.org/async-neocities/-/async-neocities-1.1.6.tgz","_shasum":"405b45565ccbe9c4ea56e65552ae9c48c20a0309","_spec":"async-neocities@1.1.6","_where":"/Users/bret/repos/deploy-to-neocities","author":{"name":"Bret Comnes","email":"bcomnes@gmail.com","url":"https://bret.io"},"bugs":{"url":"https://github.com/bcomnes/async-neocities/issues"},"bundleDependencies":false,"dependencies":{"async-folder-walker":"^2.0.1","fetch-errors":"^2.0.1","form-data":"^3.0.0","nanoassert":"^2.0.0","node-fetch":"^2.6.0","pretty-bytes":"^5.3.0","pump":"^3.0.0","pumpify":"^2.0.1","qs":"^6.9.1","streamx":"^2.6.0"},"deprecated":false,"description":"WIP - nothing to see here","devDependencies":{"auto-changelog":"^1.16.2","dependency-check":"^4.1.0","gh-release":"^3.5.0","npm-run-all":"^4.1.5","standard":"^13.1.0","tap":"^14.10.2"},"homepage":"https://github.com/bcomnes/async-neocities","keywords":["neocities","async","api client","static hosting"],"license":"MIT","main":"index.js","name":"async-neocities","repository":{"type":"git","url":"git+https://github.com/bcomnes/async-neocities.git"},"scripts":{"prepublishOnly":"git push --follow-tags && gh-release","test":"run-s test:*","test:deps":"dependency-check . 
--no-dev --no-peer","test:standard":"standard","test:tape":"tap","version":"auto-changelog -p --template keepachangelog auto-changelog --breaking-pattern 'BREAKING:' && git add CHANGELOG.md"},"standard":{"ignore":["dist"]},"version":"1.1.6"}; + +/***/ }), + +/***/ 453: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var once = __webpack_require__(49) +var eos = __webpack_require__(9) +var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes + +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump + + +/***/ }), + +/***/ 454: +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var Stream = _interopDefault(__webpack_require__(413)); +var http = _interopDefault(__webpack_require__(605)); +var Url = _interopDefault(__webpack_require__(835)); +var https = _interopDefault(__webpack_require__(211)); +var zlib = _interopDefault(__webpack_require__(761)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type 
= type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = __webpack_require__(18).convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout + }; + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; + + +/***/ }), + +/***/ 470: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const command_1 = __webpack_require__(431); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable + */ +function exportVariable(name, val) { + process.env[name] = val; + command_1.issueCommand('set-env', { name }, val); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + command_1.issueCommand('add-path', {}, inputPath); + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. The value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store + */ +function setOutput(name, value) { + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. 
+ * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message + */ +function error(message) { + command_1.issue('error', message); +} +exports.error = error; +/** + * Adds an warning issue + * @param message warning issue message + */ +function warning(message) { + command_1.issue('warning', message); +} +exports.warning = warning; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store + */ +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 475: +/***/ (function(module) { + +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? 
stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } + + return state.buffer[0].length + } + + return state.length +} + + +/***/ }), + +/***/ 487: +/***/ (function(module) { + +module.exports = assert + +class AssertionError extends Error {} +AssertionError.prototype.name = 'AssertionError' + +/** + * Minimal assert function + * @param {any} t Value to check if falsy + * @param {string=} m Optional assertion error message + * @throws {AssertionError} + */ +function assert (t, m) { + if (!t) { + var err = new AssertionError(m) + if (Error.captureStackTrace) Error.captureStackTrace(err, assert) + throw err + } +} + + +/***/ }), + +/***/ 500: +/***/ (function(module) { + +module.exports = defer; + +/** + * Runs provided function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} + + +/***/ }), + +/***/ 512: +/***/ (function(module) { + +module.exports = {"application/1d-interleaved-parityfec":{"source":"iana"},"application/3gpdash-qoe-report+xml":{"source":"iana","compressible":true},"application/3gpp-ims+xml":{"source":"iana","compressible":true},"application/a2l":{"source":"iana"},"application/activemessage":{"source":"iana"},"application/activity+json":{"source":"iana","compressible":true},"application/alto-costmap+json":{"source":"iana","compressible":true},"application/alto-costmapfilter+json":{"source":"iana","compressible":true},"application/alto-directory+json":{"source":"iana","compressible":true},"application/alto-endpointcost+json":{"source":"iana","compressible":true},"application/alto-endpointcostparams+json":{"source":"iana","compressible":true},"application/alto-endpointprop+json":{"source":"iana","compressible":true},"application/alto-endpointpropparams+json":{"source":"iana","compressible":true},"application/alto-error+json":{"source":"iana","compressible":true},"application/alto-networkmap+json":{"source":"iana","compressible":true},"application/alto-networkmapfilter+json":{"source":"iana","compressible":true},"application/aml":{"source":"iana"},"application/andrew-inset":{"source":"iana","extensions":["ez"]},"application/applefile":{"source":"iana"},"application/applixware":{"source":"apache","extensions":["aw"]},"application/atf":{"source":"iana"},"application/atfx":{"source":"iana"},"application/atom+xml":{"source":"iana","compressible":true,"extensions":["atom"]},"application/atomcat+xml":{"source":"iana","compressible":true,"extensions":["atomcat"]},"application/atomdeleted+xml":{"source":"iana","compressible":true,"extensions":["atomdeleted"]},"application/atomicmail":{"source":"iana"},"application/atomsvc+xml":{"source":"iana","compressible":true,"extensions":["atomsvc"]},"application/atsc-dwd+xml":{"source":"iana","compressible":true,"extensions":["dwd"]},"application/atsc-held+xml":{"source":"iana","compressible":true,"extensions":["held"]},"application/atsc-rdt+json":{"source":"iana","compressible":true},"application/atsc-rsat+xml":{"source":"iana","compressible":true,"extensions":["rsat"]},"application/atxml":{"source":"iana"},"application/auth-policy+xml":
{"source":"iana","compressible":true},"application/bacnet-xdd+zip":{"source":"iana","compressible":false},"application/batch-smtp":{"source":"iana"},"application/bdoc":{"compressible":false,"extensions":["bdoc"]},"application/beep+xml":{"source":"iana","compressible":true},"application/calendar+json":{"source":"iana","compressible":true},"application/calendar+xml":{"source":"iana","compressible":true,"extensions":["xcs"]},"application/call-completion":{"source":"iana"},"application/cals-1840":{"source":"iana"},"application/cbor":{"source":"iana"},"application/cbor-seq":{"source":"iana"},"application/cccex":{"source":"iana"},"application/ccmp+xml":{"source":"iana","compressible":true},"application/ccxml+xml":{"source":"iana","compressible":true,"extensions":["ccxml"]},"application/cdfx+xml":{"source":"iana","compressible":true,"extensions":["cdfx"]},"application/cdmi-capability":{"source":"iana","extensions":["cdmia"]},"application/cdmi-container":{"source":"iana","extensions":["cdmic"]},"application/cdmi-domain":{"source":"iana","extensions":["cdmid"]},"application/cdmi-object":{"source":"iana","extensions":["cdmio"]},"application/cdmi-queue":{"source":"iana","extensions":["cdmiq"]},"application/cdni":{"source":"iana"},"application/cea":{"source":"iana"},"application/cea-2018+xml":{"source":"iana","compressible":true},"application/cellml+xml":{"source":"iana","compressible":true},"application/cfw":{"source":"iana"},"application/clue+xml":{"source":"iana","compressible":true},"application/clue_info+xml":{"source":"iana","compressible":true},"application/cms":{"source":"iana"},"application/cnrp+xml":{"source":"iana","compressible":true},"application/coap-group+json":{"source":"iana","compressible":true},"application/coap-payload":{"source":"iana"},"application/commonground":{"source":"iana"},"application/conference-info+xml":{"source":"iana","compressible":true},"application/cose":{"source":"iana"},"application/cose-key":{"source":"iana"},"application/cose-key-set":{"source":"iana"},"application/cpl+xml":{"source":"iana","compressible":true},"application/csrattrs":{"source":"iana"},"application/csta+xml":{"source":"iana","compressible":true},"application/cstadata+xml":{"source":"iana","compressible":true},"application/csvm+json":{"source":"iana","compressible":true},"application/cu-seeme":{"source":"apache","extensions":["cu"]},"application/cwt":{"source":"iana"},"application/cybercash":{"source":"iana"},"application/dart":{"compressible":true},"application/dash+xml":{"source":"iana","compressible":true,"extensions":["mpd"]},"application/dashdelta":{"source":"iana"},"application/davmount+xml":{"source":"iana","compressible":true,"extensions":["davmount"]},"application/dca-rft":{"source":"iana"},"application/dcd":{"source":"iana"},"application/dec-dx":{"source":"iana"},"application/dialog-info+xml":{"source":"iana","compressible":true},"application/dicom":{"source":"iana"},"application/dicom+json":{"source":"iana","compressible":true},"application/dicom+xml":{"source":"iana","compressible":true},"application/dii":{"source":"iana"},"application/dit":{"source":"iana"},"application/dns":{"source":"iana"},"application/dns+json":{"source":"iana","compressible":true},"application/dns-message":{"source":"iana"},"application/docbook+xml":{"source":"apache","compressible":true,"extensions":["dbk"]},"application/dskpp+xml":{"source":"iana","compressible":true},"application/dssc+der":{"source":"iana","extensions":["dssc"]},"application/dssc+xml":{"source":"iana","compressible":true,"extensions":["xdssc"]
},"application/dvcs":{"source":"iana"},"application/ecmascript":{"source":"iana","compressible":true,"extensions":["ecma","es"]},"application/edi-consent":{"source":"iana"},"application/edi-x12":{"source":"iana","compressible":false},"application/edifact":{"source":"iana","compressible":false},"application/efi":{"source":"iana"},"application/emergencycalldata.comment+xml":{"source":"iana","compressible":true},"application/emergencycalldata.control+xml":{"source":"iana","compressible":true},"application/emergencycalldata.deviceinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.ecall.msd":{"source":"iana"},"application/emergencycalldata.providerinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.serviceinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.subscriberinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.veds+xml":{"source":"iana","compressible":true},"application/emma+xml":{"source":"iana","compressible":true,"extensions":["emma"]},"application/emotionml+xml":{"source":"iana","compressible":true,"extensions":["emotionml"]},"application/encaprtp":{"source":"iana"},"application/epp+xml":{"source":"iana","compressible":true},"application/epub+zip":{"source":"iana","compressible":false,"extensions":["epub"]},"application/eshop":{"source":"iana"},"application/exi":{"source":"iana","extensions":["exi"]},"application/expect-ct-report+json":{"source":"iana","compressible":true},"application/fastinfoset":{"source":"iana"},"application/fastsoap":{"source":"iana"},"application/fdt+xml":{"source":"iana","compressible":true,"extensions":["fdt"]},"application/fhir+json":{"source":"iana","compressible":true},"application/fhir+xml":{"source":"iana","compressible":true},"application/fido.trusted-apps+json":{"compressible":true},"application/fits":{"source":"iana"},"application/flexfec":{"source":"iana"},"application/font-sfnt":{"source":"iana"},"application/font-tdpfr":{"source":"iana","extensions":["pfr"]},"application/font-woff":{"source":"iana","compressible":false},"application/framework-attributes+xml":{"source":"iana","compressible":true},"application/geo+json":{"source":"iana","compressible":true,"extensions":["geojson"]},"application/geo+json-seq":{"source":"iana"},"application/geopackage+sqlite3":{"source":"iana"},"application/geoxacml+xml":{"source":"iana","compressible":true},"application/gltf-buffer":{"source":"iana"},"application/gml+xml":{"source":"iana","compressible":true,"extensions":["gml"]},"application/gpx+xml":{"source":"apache","compressible":true,"extensions":["gpx"]},"application/gxf":{"source":"apache","extensions":["gxf"]},"application/gzip":{"source":"iana","compressible":false,"extensions":["gz"]},"application/h224":{"source":"iana"},"application/held+xml":{"source":"iana","compressible":true},"application/hjson":{"extensions":["hjson"]},"application/http":{"source":"iana"},"application/hyperstudio":{"source":"iana","extensions":["stk"]},"application/ibe-key-request+xml":{"source":"iana","compressible":true},"application/ibe-pkg-reply+xml":{"source":"iana","compressible":true},"application/ibe-pp-data":{"source":"iana"},"application/iges":{"source":"iana"},"application/im-iscomposing+xml":{"source":"iana","compressible":true},"application/index":{"source":"iana"},"application/index.cmd":{"source":"iana"},"application/index.obj":{"source":"iana"},"application/index.response":{"source":"iana"},"application/index.vnd":{"source":"iana"},"application/inkml+xml":{"sourc
e":"iana","compressible":true,"extensions":["ink","inkml"]},"application/iotp":{"source":"iana"},"application/ipfix":{"source":"iana","extensions":["ipfix"]},"application/ipp":{"source":"iana"},"application/isup":{"source":"iana"},"application/its+xml":{"source":"iana","compressible":true,"extensions":["its"]},"application/java-archive":{"source":"apache","compressible":false,"extensions":["jar","war","ear"]},"application/java-serialized-object":{"source":"apache","compressible":false,"extensions":["ser"]},"application/java-vm":{"source":"apache","compressible":false,"extensions":["class"]},"application/javascript":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["js","mjs"]},"application/jf2feed+json":{"source":"iana","compressible":true},"application/jose":{"source":"iana"},"application/jose+json":{"source":"iana","compressible":true},"application/jrd+json":{"source":"iana","compressible":true},"application/json":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["json","map"]},"application/json-patch+json":{"source":"iana","compressible":true},"application/json-seq":{"source":"iana"},"application/json5":{"extensions":["json5"]},"application/jsonml+json":{"source":"apache","compressible":true,"extensions":["jsonml"]},"application/jwk+json":{"source":"iana","compressible":true},"application/jwk-set+json":{"source":"iana","compressible":true},"application/jwt":{"source":"iana"},"application/kpml-request+xml":{"source":"iana","compressible":true},"application/kpml-response+xml":{"source":"iana","compressible":true},"application/ld+json":{"source":"iana","compressible":true,"extensions":["jsonld"]},"application/lgr+xml":{"source":"iana","compressible":true,"extensions":["lgr"]},"application/link-format":{"source":"iana"},"application/load-control+xml":{"source":"iana","compressible":true},"application/lost+xml":{"source":"iana","compressible":true,"extensions":["lostxml"]},"application/lostsync+xml":{"source":"iana","compressible":true},"application/lxf":{"source":"iana"},"application/mac-binhex40":{"source":"iana","extensions":["hqx"]},"application/mac-compactpro":{"source":"apache","extensions":["cpt"]},"application/macwriteii":{"source":"iana"},"application/mads+xml":{"source":"iana","compressible":true,"extensions":["mads"]},"application/manifest+json":{"charset":"UTF-8","compressible":true,"extensions":["webmanifest"]},"application/marc":{"source":"iana","extensions":["mrc"]},"application/marcxml+xml":{"source":"iana","compressible":true,"extensions":["mrcx"]},"application/mathematica":{"source":"iana","extensions":["ma","nb","mb"]},"application/mathml+xml":{"source":"iana","compressible":true,"extensions":["mathml"]},"application/mathml-content+xml":{"source":"iana","compressible":true},"application/mathml-presentation+xml":{"source":"iana","compressible":true},"application/mbms-associated-procedure-description+xml":{"source":"iana","compressible":true},"application/mbms-deregister+xml":{"source":"iana","compressible":true},"application/mbms-envelope+xml":{"source":"iana","compressible":true},"application/mbms-msk+xml":{"source":"iana","compressible":true},"application/mbms-msk-response+xml":{"source":"iana","compressible":true},"application/mbms-protection-description+xml":{"source":"iana","compressible":true},"application/mbms-reception-report+xml":{"source":"iana","compressible":true},"application/mbms-register+xml":{"source":"iana","compressible":true},"application/mbms-register-response+xml":{"source":"iana","compressible":true},"application/mb
ms-schedule+xml":{"source":"iana","compressible":true},"application/mbms-user-service-description+xml":{"source":"iana","compressible":true},"application/mbox":{"source":"iana","extensions":["mbox"]},"application/media-policy-dataset+xml":{"source":"iana","compressible":true},"application/media_control+xml":{"source":"iana","compressible":true},"application/mediaservercontrol+xml":{"source":"iana","compressible":true,"extensions":["mscml"]},"application/merge-patch+json":{"source":"iana","compressible":true},"application/metalink+xml":{"source":"apache","compressible":true,"extensions":["metalink"]},"application/metalink4+xml":{"source":"iana","compressible":true,"extensions":["meta4"]},"application/mets+xml":{"source":"iana","compressible":true,"extensions":["mets"]},"application/mf4":{"source":"iana"},"application/mikey":{"source":"iana"},"application/mipc":{"source":"iana"},"application/mmt-aei+xml":{"source":"iana","compressible":true,"extensions":["maei"]},"application/mmt-usd+xml":{"source":"iana","compressible":true,"extensions":["musd"]},"application/mods+xml":{"source":"iana","compressible":true,"extensions":["mods"]},"application/moss-keys":{"source":"iana"},"application/moss-signature":{"source":"iana"},"application/mosskey-data":{"source":"iana"},"application/mosskey-request":{"source":"iana"},"application/mp21":{"source":"iana","extensions":["m21","mp21"]},"application/mp4":{"source":"iana","extensions":["mp4s","m4p"]},"application/mpeg4-generic":{"source":"iana"},"application/mpeg4-iod":{"source":"iana"},"application/mpeg4-iod-xmt":{"source":"iana"},"application/mrb-consumer+xml":{"source":"iana","compressible":true,"extensions":["xdf"]},"application/mrb-publish+xml":{"source":"iana","compressible":true,"extensions":["xdf"]},"application/msc-ivr+xml":{"source":"iana","compressible":true},"application/msc-mixer+xml":{"source":"iana","compressible":true},"application/msword":{"source":"iana","compressible":false,"extensions":["doc","dot"]},"application/mud+json":{"source":"iana","compressible":true},"application/multipart-core":{"source":"iana"},"application/mxf":{"source":"iana","extensions":["mxf"]},"application/n-quads":{"source":"iana","extensions":["nq"]},"application/n-triples":{"source":"iana","extensions":["nt"]},"application/nasdata":{"source":"iana"},"application/news-checkgroups":{"source":"iana"},"application/news-groupinfo":{"source":"iana"},"application/news-transmission":{"source":"iana"},"application/nlsml+xml":{"source":"iana","compressible":true},"application/node":{"source":"iana"},"application/nss":{"source":"iana"},"application/ocsp-request":{"source":"iana"},"application/ocsp-response":{"source":"iana"},"application/octet-stream":{"source":"iana","compressible":false,"extensions":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"]},"application/oda":{"source":"iana","extensions":["oda"]},"application/odm+xml":{"source":"iana","compressible":true},"application/odx":{"source":"iana"},"application/oebps-package+xml":{"source":"iana","compressible":true,"extensions":["opf"]},"application/ogg":{"source":"iana","compressible":false,"extensions":["ogx"]},"application/omdoc+xml":{"source":"apache","compressible":true,"extensions":["omdoc"]},"application/onenote":{"source":"apache","extensions":["onetoc","onetoc2","onetmp","onepkg"]},"application/oscore":{"source":"iana"},"application/oxps":{"source":"iana","extensions":["oxps"]},"application/p2p-overlay+xml":{"source":"iana",
"compressible":true,"extensions":["relo"]},"application/parityfec":{"source":"iana"},"application/passport":{"source":"iana"},"application/patch-ops-error+xml":{"source":"iana","compressible":true,"extensions":["xer"]},"application/pdf":{"source":"iana","compressible":false,"extensions":["pdf"]},"application/pdx":{"source":"iana"},"application/pem-certificate-chain":{"source":"iana"},"application/pgp-encrypted":{"source":"iana","compressible":false,"extensions":["pgp"]},"application/pgp-keys":{"source":"iana"},"application/pgp-signature":{"source":"iana","extensions":["asc","sig"]},"application/pics-rules":{"source":"apache","extensions":["prf"]},"application/pidf+xml":{"source":"iana","compressible":true},"application/pidf-diff+xml":{"source":"iana","compressible":true},"application/pkcs10":{"source":"iana","extensions":["p10"]},"application/pkcs12":{"source":"iana"},"application/pkcs7-mime":{"source":"iana","extensions":["p7m","p7c"]},"application/pkcs7-signature":{"source":"iana","extensions":["p7s"]},"application/pkcs8":{"source":"iana","extensions":["p8"]},"application/pkcs8-encrypted":{"source":"iana"},"application/pkix-attr-cert":{"source":"iana","extensions":["ac"]},"application/pkix-cert":{"source":"iana","extensions":["cer"]},"application/pkix-crl":{"source":"iana","extensions":["crl"]},"application/pkix-pkipath":{"source":"iana","extensions":["pkipath"]},"application/pkixcmp":{"source":"iana","extensions":["pki"]},"application/pls+xml":{"source":"iana","compressible":true,"extensions":["pls"]},"application/poc-settings+xml":{"source":"iana","compressible":true},"application/postscript":{"source":"iana","compressible":true,"extensions":["ai","eps","ps"]},"application/ppsp-tracker+json":{"source":"iana","compressible":true},"application/problem+json":{"source":"iana","compressible":true},"application/problem+xml":{"source":"iana","compressible":true},"application/provenance+xml":{"source":"iana","compressible":true,"extensions":["provx"]},"application/prs.alvestrand.titrax-sheet":{"source":"iana"},"application/prs.cww":{"source":"iana","extensions":["cww"]},"application/prs.hpub+zip":{"source":"iana","compressible":false},"application/prs.nprend":{"source":"iana"},"application/prs.plucker":{"source":"iana"},"application/prs.rdf-xml-crypt":{"source":"iana"},"application/prs.xsf+xml":{"source":"iana","compressible":true},"application/pskc+xml":{"source":"iana","compressible":true,"extensions":["pskcxml"]},"application/qsig":{"source":"iana"},"application/raml+yaml":{"compressible":true,"extensions":["raml"]},"application/raptorfec":{"source":"iana"},"application/rdap+json":{"source":"iana","compressible":true},"application/rdf+xml":{"source":"iana","compressible":true,"extensions":["rdf","owl"]},"application/reginfo+xml":{"source":"iana","compressible":true,"extensions":["rif"]},"application/relax-ng-compact-syntax":{"source":"iana","extensions":["rnc"]},"application/remote-printing":{"source":"iana"},"application/reputon+json":{"source":"iana","compressible":true},"application/resource-lists+xml":{"source":"iana","compressible":true,"extensions":["rl"]},"application/resource-lists-diff+xml":{"source":"iana","compressible":true,"extensions":["rld"]},"application/rfc+xml":{"source":"iana","compressible":true},"application/riscos":{"source":"iana"},"application/rlmi+xml":{"source":"iana","compressible":true},"application/rls-services+xml":{"source":"iana","compressible":true,"extensions":["rs"]},"application/route-apd+xml":{"source":"iana","compressible":true,"extensions":["rapd"]},"ap
plication/route-s-tsid+xml":{"source":"iana","compressible":true,"extensions":["sls"]},"application/route-usd+xml":{"source":"iana","compressible":true,"extensions":["rusd"]},"application/rpki-ghostbusters":{"source":"iana","extensions":["gbr"]},"application/rpki-manifest":{"source":"iana","extensions":["mft"]},"application/rpki-publication":{"source":"iana"},"application/rpki-roa":{"source":"iana","extensions":["roa"]},"application/rpki-updown":{"source":"iana"},"application/rsd+xml":{"source":"apache","compressible":true,"extensions":["rsd"]},"application/rss+xml":{"source":"apache","compressible":true,"extensions":["rss"]},"application/rtf":{"source":"iana","compressible":true,"extensions":["rtf"]},"application/rtploopback":{"source":"iana"},"application/rtx":{"source":"iana"},"application/samlassertion+xml":{"source":"iana","compressible":true},"application/samlmetadata+xml":{"source":"iana","compressible":true},"application/sbml+xml":{"source":"iana","compressible":true,"extensions":["sbml"]},"application/scaip+xml":{"source":"iana","compressible":true},"application/scim+json":{"source":"iana","compressible":true},"application/scvp-cv-request":{"source":"iana","extensions":["scq"]},"application/scvp-cv-response":{"source":"iana","extensions":["scs"]},"application/scvp-vp-request":{"source":"iana","extensions":["spq"]},"application/scvp-vp-response":{"source":"iana","extensions":["spp"]},"application/sdp":{"source":"iana","extensions":["sdp"]},"application/secevent+jwt":{"source":"iana"},"application/senml+cbor":{"source":"iana"},"application/senml+json":{"source":"iana","compressible":true},"application/senml+xml":{"source":"iana","compressible":true,"extensions":["senmlx"]},"application/senml-exi":{"source":"iana"},"application/sensml+cbor":{"source":"iana"},"application/sensml+json":{"source":"iana","compressible":true},"application/sensml+xml":{"source":"iana","compressible":true,"extensions":["sensmlx"]},"application/sensml-exi":{"source":"iana"},"application/sep+xml":{"source":"iana","compressible":true},"application/sep-exi":{"source":"iana"},"application/session-info":{"source":"iana"},"application/set-payment":{"source":"iana"},"application/set-payment-initiation":{"source":"iana","extensions":["setpay"]},"application/set-registration":{"source":"iana"},"application/set-registration-initiation":{"source":"iana","extensions":["setreg"]},"application/sgml":{"source":"iana"},"application/sgml-open-catalog":{"source":"iana"},"application/shf+xml":{"source":"iana","compressible":true,"extensions":["shf"]},"application/sieve":{"source":"iana","extensions":["siv","sieve"]},"application/simple-filter+xml":{"source":"iana","compressible":true},"application/simple-message-summary":{"source":"iana"},"application/simplesymbolcontainer":{"source":"iana"},"application/sipc":{"source":"iana"},"application/slate":{"source":"iana"},"application/smil":{"source":"iana"},"application/smil+xml":{"source":"iana","compressible":true,"extensions":["smi","smil"]},"application/smpte336m":{"source":"iana"},"application/soap+fastinfoset":{"source":"iana"},"application/soap+xml":{"source":"iana","compressible":true},"application/sparql-query":{"source":"iana","extensions":["rq"]},"application/sparql-results+xml":{"source":"iana","compressible":true,"extensions":["srx"]},"application/spirits-event+xml":{"source":"iana","compressible":true},"application/sql":{"source":"iana"},"application/srgs":{"source":"iana","extensions":["gram"]},"application/srgs+xml":{"source":"iana","compressible":true,"extensions":["
grxml"]},"application/sru+xml":{"source":"iana","compressible":true,"extensions":["sru"]},"application/ssdl+xml":{"source":"apache","compressible":true,"extensions":["ssdl"]},"application/ssml+xml":{"source":"iana","compressible":true,"extensions":["ssml"]},"application/stix+json":{"source":"iana","compressible":true},"application/swid+xml":{"source":"iana","compressible":true,"extensions":["swidtag"]},"application/tamp-apex-update":{"source":"iana"},"application/tamp-apex-update-confirm":{"source":"iana"},"application/tamp-community-update":{"source":"iana"},"application/tamp-community-update-confirm":{"source":"iana"},"application/tamp-error":{"source":"iana"},"application/tamp-sequence-adjust":{"source":"iana"},"application/tamp-sequence-adjust-confirm":{"source":"iana"},"application/tamp-status-query":{"source":"iana"},"application/tamp-status-response":{"source":"iana"},"application/tamp-update":{"source":"iana"},"application/tamp-update-confirm":{"source":"iana"},"application/tar":{"compressible":true},"application/taxii+json":{"source":"iana","compressible":true},"application/tei+xml":{"source":"iana","compressible":true,"extensions":["tei","teicorpus"]},"application/tetra_isi":{"source":"iana"},"application/thraud+xml":{"source":"iana","compressible":true,"extensions":["tfi"]},"application/timestamp-query":{"source":"iana"},"application/timestamp-reply":{"source":"iana"},"application/timestamped-data":{"source":"iana","extensions":["tsd"]},"application/tlsrpt+gzip":{"source":"iana"},"application/tlsrpt+json":{"source":"iana","compressible":true},"application/tnauthlist":{"source":"iana"},"application/toml":{"compressible":true,"extensions":["toml"]},"application/trickle-ice-sdpfrag":{"source":"iana"},"application/trig":{"source":"iana"},"application/ttml+xml":{"source":"iana","compressible":true,"extensions":["ttml"]},"application/tve-trigger":{"source":"iana"},"application/tzif":{"source":"iana"},"application/tzif-leap":{"source":"iana"},"application/ulpfec":{"source":"iana"},"application/urc-grpsheet+xml":{"source":"iana","compressible":true},"application/urc-ressheet+xml":{"source":"iana","compressible":true,"extensions":["rsheet"]},"application/urc-targetdesc+xml":{"source":"iana","compressible":true},"application/urc-uisocketdesc+xml":{"source":"iana","compressible":true},"application/vcard+json":{"source":"iana","compressible":true},"application/vcard+xml":{"source":"iana","compressible":true},"application/vemmi":{"source":"iana"},"application/vividence.scriptfile":{"source":"apache"},"application/vnd.1000minds.decision-model+xml":{"source":"iana","compressible":true,"extensions":["1km"]},"application/vnd.3gpp-prose+xml":{"source":"iana","compressible":true},"application/vnd.3gpp-prose-pc3ch+xml":{"source":"iana","compressible":true},"application/vnd.3gpp-v2x-local-service-information":{"source":"iana"},"application/vnd.3gpp.access-transfer-events+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.bsf+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.gmop+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mc-signalling-ear":{"source":"iana"},"application/vnd.3gpp.mcdata-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-payload":{"source":"iana"},"application/vnd.3gpp.mcdata-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-signalling":{"source":"iana"},"application/vnd.3gpp.mcdata-
ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-floor-request+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-location-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-mbms-usage-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-signed+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-ue-init-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-affiliation-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-location-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-mbms-usage-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-transmission-request+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mid-call+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.pic-bw-large":{"source":"iana","extensions":["plb"]},"application/vnd.3gpp.pic-bw-small":{"source":"iana","extensions":["psb"]},"application/vnd.3gpp.pic-bw-var":{"source":"iana","extensions":["pvb"]},"application/vnd.3gpp.sms":{"source":"iana"},"application/vnd.3gpp.sms+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.srvcc-ext+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.srvcc-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.state-and-event-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.ussd+xml":{"source":"iana","compressible":true},"application/vnd.3gpp2.bcmcsinfo+xml":{"source":"iana","compressible":true},"application/vnd.3gpp2.sms":{"source":"iana"},"application/vnd.3gpp2.tcap":{"source":"iana","extensions":["tcap"]},"application/vnd.3lightssoftware.imagescal":{"source":"iana"},"application/vnd.3m.post-it-notes":{"source":"iana","extensions":["pwn"]},"application/vnd.accpac.simply.aso":{"source":"iana","extensions":["aso"]},"application/vnd.accpac.simply.imp":{"source":"iana","extensions":["imp"]},"application/vnd.acucobol":{"source":"iana","extensions":["acu"]},"application/vnd.acucorp":{"source":"iana","extensions":["atc","acutc"]},"application/vnd.adobe.air-application-installer-package+zip":{"source":"apache","compressible":false,"extensions":["air"]},"application/vnd.adobe.flash.movie":{"source":"iana"},"application/vnd.adobe.formscentral.fcdt":{"source":"iana","extensions":["fcdt"]},"application/vnd.adobe.fxp":{"source":"iana","extensions":["fxp","fxpl"]},"application/vnd.adobe.partial-upload":{"source":"iana"},"application/vnd.adobe.xdp+xml":{"source":"iana","compressible":t
rue,"extensions":["xdp"]},"application/vnd.adobe.xfdf":{"source":"iana","extensions":["xfdf"]},"application/vnd.aether.imp":{"source":"iana"},"application/vnd.afpc.afplinedata":{"source":"iana"},"application/vnd.afpc.afplinedata-pagedef":{"source":"iana"},"application/vnd.afpc.foca-charset":{"source":"iana"},"application/vnd.afpc.foca-codedfont":{"source":"iana"},"application/vnd.afpc.foca-codepage":{"source":"iana"},"application/vnd.afpc.modca":{"source":"iana"},"application/vnd.afpc.modca-formdef":{"source":"iana"},"application/vnd.afpc.modca-mediummap":{"source":"iana"},"application/vnd.afpc.modca-objectcontainer":{"source":"iana"},"application/vnd.afpc.modca-overlay":{"source":"iana"},"application/vnd.afpc.modca-pagesegment":{"source":"iana"},"application/vnd.ah-barcode":{"source":"iana"},"application/vnd.ahead.space":{"source":"iana","extensions":["ahead"]},"application/vnd.airzip.filesecure.azf":{"source":"iana","extensions":["azf"]},"application/vnd.airzip.filesecure.azs":{"source":"iana","extensions":["azs"]},"application/vnd.amadeus+json":{"source":"iana","compressible":true},"application/vnd.amazon.ebook":{"source":"apache","extensions":["azw"]},"application/vnd.amazon.mobi8-ebook":{"source":"iana"},"application/vnd.americandynamics.acc":{"source":"iana","extensions":["acc"]},"application/vnd.amiga.ami":{"source":"iana","extensions":["ami"]},"application/vnd.amundsen.maze+xml":{"source":"iana","compressible":true},"application/vnd.android.ota":{"source":"iana"},"application/vnd.android.package-archive":{"source":"apache","compressible":false,"extensions":["apk"]},"application/vnd.anki":{"source":"iana"},"application/vnd.anser-web-certificate-issue-initiation":{"source":"iana","extensions":["cii"]},"application/vnd.anser-web-funds-transfer-initiation":{"source":"apache","extensions":["fti"]},"application/vnd.antix.game-component":{"source":"iana","extensions":["atx"]},"application/vnd.apache.thrift.binary":{"source":"iana"},"application/vnd.apache.thrift.compact":{"source":"iana"},"application/vnd.apache.thrift.json":{"source":"iana"},"application/vnd.api+json":{"source":"iana","compressible":true},"application/vnd.aplextor.warrp+json":{"source":"iana","compressible":true},"application/vnd.apothekende.reservation+json":{"source":"iana","compressible":true},"application/vnd.apple.installer+xml":{"source":"iana","compressible":true,"extensions":["mpkg"]},"application/vnd.apple.keynote":{"source":"iana","extensions":["keynote"]},"application/vnd.apple.mpegurl":{"source":"iana","extensions":["m3u8"]},"application/vnd.apple.numbers":{"source":"iana","extensions":["numbers"]},"application/vnd.apple.pages":{"source":"iana","extensions":["pages"]},"application/vnd.apple.pkpass":{"compressible":false,"extensions":["pkpass"]},"application/vnd.arastra.swi":{"source":"iana"},"application/vnd.aristanetworks.swi":{"source":"iana","extensions":["swi"]},"application/vnd.artisan+json":{"source":"iana","compressible":true},"application/vnd.artsquare":{"source":"iana"},"application/vnd.astraea-software.iota":{"source":"iana","extensions":["iota"]},"application/vnd.audiograph":{"source":"iana","extensions":["aep"]},"application/vnd.autopackage":{"source":"iana"},"application/vnd.avalon+json":{"source":"iana","compressible":true},"application/vnd.avistar+xml":{"source":"iana","compressible":true},"application/vnd.balsamiq.bmml+xml":{"source":"iana","compressible":true,"extensions":["bmml"]},"application/vnd.balsamiq.bmpr":{"source":"iana"},"application/vnd.banana-accounting":{"source":"iana"},"applicat
ion/vnd.bbf.usp.error":{"source":"iana"},"application/vnd.bbf.usp.msg":{"source":"iana"},"application/vnd.bbf.usp.msg+json":{"source":"iana","compressible":true},"application/vnd.bekitzur-stech+json":{"source":"iana","compressible":true},"application/vnd.bint.med-content":{"source":"iana"},"application/vnd.biopax.rdf+xml":{"source":"iana","compressible":true},"application/vnd.blink-idb-value-wrapper":{"source":"iana"},"application/vnd.blueice.multipass":{"source":"iana","extensions":["mpm"]},"application/vnd.bluetooth.ep.oob":{"source":"iana"},"application/vnd.bluetooth.le.oob":{"source":"iana"},"application/vnd.bmi":{"source":"iana","extensions":["bmi"]},"application/vnd.bpf":{"source":"iana"},"application/vnd.bpf3":{"source":"iana"},"application/vnd.businessobjects":{"source":"iana","extensions":["rep"]},"application/vnd.byu.uapi+json":{"source":"iana","compressible":true},"application/vnd.cab-jscript":{"source":"iana"},"application/vnd.canon-cpdl":{"source":"iana"},"application/vnd.canon-lips":{"source":"iana"},"application/vnd.capasystems-pg+json":{"source":"iana","compressible":true},"application/vnd.cendio.thinlinc.clientconf":{"source":"iana"},"application/vnd.century-systems.tcp_stream":{"source":"iana"},"application/vnd.chemdraw+xml":{"source":"iana","compressible":true,"extensions":["cdxml"]},"application/vnd.chess-pgn":{"source":"iana"},"application/vnd.chipnuts.karaoke-mmd":{"source":"iana","extensions":["mmd"]},"application/vnd.ciedi":{"source":"iana"},"application/vnd.cinderella":{"source":"iana","extensions":["cdy"]},"application/vnd.cirpack.isdn-ext":{"source":"iana"},"application/vnd.citationstyles.style+xml":{"source":"iana","compressible":true,"extensions":["csl"]},"application/vnd.claymore":{"source":"iana","extensions":["cla"]},"application/vnd.cloanto.rp9":{"source":"iana","extensions":["rp9"]},"application/vnd.clonk.c4group":{"source":"iana","extensions":["c4g","c4d","c4f","c4p","c4u"]},"application/vnd.cluetrust.cartomobile-config":{"source":"iana","extensions":["c11amc"]},"application/vnd.cluetrust.cartomobile-config-pkg":{"source":"iana","extensions":["c11amz"]},"application/vnd.coffeescript":{"source":"iana"},"application/vnd.collabio.xodocuments.document":{"source":"iana"},"application/vnd.collabio.xodocuments.document-template":{"source":"iana"},"application/vnd.collabio.xodocuments.presentation":{"source":"iana"},"application/vnd.collabio.xodocuments.presentation-template":{"source":"iana"},"application/vnd.collabio.xodocuments.spreadsheet":{"source":"iana"},"application/vnd.collabio.xodocuments.spreadsheet-template":{"source":"iana"},"application/vnd.collection+json":{"source":"iana","compressible":true},"application/vnd.collection.doc+json":{"source":"iana","compressible":true},"application/vnd.collection.next+json":{"source":"iana","compressible":true},"application/vnd.comicbook+zip":{"source":"iana","compressible":false},"application/vnd.comicbook-rar":{"source":"iana"},"application/vnd.commerce-battelle":{"source":"iana"},"application/vnd.commonspace":{"source":"iana","extensions":["csp"]},"application/vnd.contact.cmsg":{"source":"iana","extensions":["cdbcmsg"]},"application/vnd.coreos.ignition+json":{"source":"iana","compressible":true},"application/vnd.cosmocaller":{"source":"iana","extensions":["cmc"]},"application/vnd.crick.clicker":{"source":"iana","extensions":["clkx"]},"application/vnd.crick.clicker.keyboard":{"source":"iana","extensions":["clkk"]},"application/vnd.crick.clicker.palette":{"source":"iana","extensions":["clkp"]},"application/vnd.crick
.clicker.template":{"source":"iana","extensions":["clkt"]},"application/vnd.crick.clicker.wordbank":{"source":"iana","extensions":["clkw"]},"application/vnd.criticaltools.wbs+xml":{"source":"iana","compressible":true,"extensions":["wbs"]},"application/vnd.cryptii.pipe+json":{"source":"iana","compressible":true},"application/vnd.crypto-shade-file":{"source":"iana"},"application/vnd.ctc-posml":{"source":"iana","extensions":["pml"]},"application/vnd.ctct.ws+xml":{"source":"iana","compressible":true},"application/vnd.cups-pdf":{"source":"iana"},"application/vnd.cups-postscript":{"source":"iana"},"application/vnd.cups-ppd":{"source":"iana","extensions":["ppd"]},"application/vnd.cups-raster":{"source":"iana"},"application/vnd.cups-raw":{"source":"iana"},"application/vnd.curl":{"source":"iana"},"application/vnd.curl.car":{"source":"apache","extensions":["car"]},"application/vnd.curl.pcurl":{"source":"apache","extensions":["pcurl"]},"application/vnd.cyan.dean.root+xml":{"source":"iana","compressible":true},"application/vnd.cybank":{"source":"iana"},"application/vnd.d2l.coursepackage1p0+zip":{"source":"iana","compressible":false},"application/vnd.dart":{"source":"iana","compressible":true,"extensions":["dart"]},"application/vnd.data-vision.rdz":{"source":"iana","extensions":["rdz"]},"application/vnd.datapackage+json":{"source":"iana","compressible":true},"application/vnd.dataresource+json":{"source":"iana","compressible":true},"application/vnd.debian.binary-package":{"source":"iana"},"application/vnd.dece.data":{"source":"iana","extensions":["uvf","uvvf","uvd","uvvd"]},"application/vnd.dece.ttml+xml":{"source":"iana","compressible":true,"extensions":["uvt","uvvt"]},"application/vnd.dece.unspecified":{"source":"iana","extensions":["uvx","uvvx"]},"application/vnd.dece.zip":{"source":"iana","extensions":["uvz","uvvz"]},"application/vnd.denovo.fcselayout-link":{"source":"iana","extensions":["fe_launch"]},"application/vnd.desmume.movie":{"source":"iana"},"application/vnd.dir-bi.plate-dl-nosuffix":{"source":"iana"},"application/vnd.dm.delegation+xml":{"source":"iana","compressible":true},"application/vnd.dna":{"source":"iana","extensions":["dna"]},"application/vnd.document+json":{"source":"iana","compressible":true},"application/vnd.dolby.mlp":{"source":"apache","extensions":["mlp"]},"application/vnd.dolby.mobile.1":{"source":"iana"},"application/vnd.dolby.mobile.2":{"source":"iana"},"application/vnd.doremir.scorecloud-binary-document":{"source":"iana"},"application/vnd.dpgraph":{"source":"iana","extensions":["dpg"]},"application/vnd.dreamfactory":{"source":"iana","extensions":["dfac"]},"application/vnd.drive+json":{"source":"iana","compressible":true},"application/vnd.ds-keypoint":{"source":"apache","extensions":["kpxx"]},"application/vnd.dtg.local":{"source":"iana"},"application/vnd.dtg.local.flash":{"source":"iana"},"application/vnd.dtg.local.html":{"source":"iana"},"application/vnd.dvb.ait":{"source":"iana","extensions":["ait"]},"application/vnd.dvb.dvbj":{"source":"iana"},"application/vnd.dvb.esgcontainer":{"source":"iana"},"application/vnd.dvb.ipdcdftnotifaccess":{"source":"iana"},"application/vnd.dvb.ipdcesgaccess":{"source":"iana"},"application/vnd.dvb.ipdcesgaccess2":{"source":"iana"},"application/vnd.dvb.ipdcesgpdd":{"source":"iana"},"application/vnd.dvb.ipdcroaming":{"source":"iana"},"application/vnd.dvb.iptv.alfec-base":{"source":"iana"},"application/vnd.dvb.iptv.alfec-enhancement":{"source":"iana"},"application/vnd.dvb.notif-aggregate-root+xml":{"source":"iana","compressible":true},"applicati
on/vnd.dvb.notif-container+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-generic+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-msglist+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-registration-request+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-registration-response+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-init+xml":{"source":"iana","compressible":true},"application/vnd.dvb.pfr":{"source":"iana"},"application/vnd.dvb.service":{"source":"iana","extensions":["svc"]},"application/vnd.dxr":{"source":"iana"},"application/vnd.dynageo":{"source":"iana","extensions":["geo"]},"application/vnd.dzr":{"source":"iana"},"application/vnd.easykaraoke.cdgdownload":{"source":"iana"},"application/vnd.ecdis-update":{"source":"iana"},"application/vnd.ecip.rlp":{"source":"iana"},"application/vnd.ecowin.chart":{"source":"iana","extensions":["mag"]},"application/vnd.ecowin.filerequest":{"source":"iana"},"application/vnd.ecowin.fileupdate":{"source":"iana"},"application/vnd.ecowin.series":{"source":"iana"},"application/vnd.ecowin.seriesrequest":{"source":"iana"},"application/vnd.ecowin.seriesupdate":{"source":"iana"},"application/vnd.efi.img":{"source":"iana"},"application/vnd.efi.iso":{"source":"iana"},"application/vnd.emclient.accessrequest+xml":{"source":"iana","compressible":true},"application/vnd.enliven":{"source":"iana","extensions":["nml"]},"application/vnd.enphase.envoy":{"source":"iana"},"application/vnd.eprints.data+xml":{"source":"iana","compressible":true},"application/vnd.epson.esf":{"source":"iana","extensions":["esf"]},"application/vnd.epson.msf":{"source":"iana","extensions":["msf"]},"application/vnd.epson.quickanime":{"source":"iana","extensions":["qam"]},"application/vnd.epson.salt":{"source":"iana","extensions":["slt"]},"application/vnd.epson.ssf":{"source":"iana","extensions":["ssf"]},"application/vnd.ericsson.quickcall":{"source":"iana"},"application/vnd.espass-espass+zip":{"source":"iana","compressible":false},"application/vnd.eszigno3+xml":{"source":"iana","compressible":true,"extensions":["es3","et3"]},"application/vnd.etsi.aoc+xml":{"source":"iana","compressible":true},"application/vnd.etsi.asic-e+zip":{"source":"iana","compressible":false},"application/vnd.etsi.asic-s+zip":{"source":"iana","compressible":false},"application/vnd.etsi.cug+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvcommand+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvdiscovery+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvprofile+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-bc+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-cod+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-npvr+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvservice+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsync+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvueprofile+xml":{"source":"iana","compressible":true},"application/vnd.etsi.mcid+xml":{"source":"iana","compressible":true},"application/vnd.etsi.mheg5":{"source":"iana"},"application/vnd.etsi.overload-control-policy-dataset+xml":{"source":"iana","compressible":true},"application/vnd.etsi.pstn+xml":{"source":"iana","compressible":true},"application/vnd.etsi.sci+xml":{"source":"iana","compressible":true},"application/vnd.etsi.simservs+xml":{"source":"ia
na","compressible":true},"application/vnd.etsi.timestamp-token":{"source":"iana"},"application/vnd.etsi.tsl+xml":{"source":"iana","compressible":true},"application/vnd.etsi.tsl.der":{"source":"iana"},"application/vnd.eudora.data":{"source":"iana"},"application/vnd.evolv.ecig.profile":{"source":"iana"},"application/vnd.evolv.ecig.settings":{"source":"iana"},"application/vnd.evolv.ecig.theme":{"source":"iana"},"application/vnd.exstream-empower+zip":{"source":"iana","compressible":false},"application/vnd.exstream-package":{"source":"iana"},"application/vnd.ezpix-album":{"source":"iana","extensions":["ez2"]},"application/vnd.ezpix-package":{"source":"iana","extensions":["ez3"]},"application/vnd.f-secure.mobile":{"source":"iana"},"application/vnd.fastcopy-disk-image":{"source":"iana"},"application/vnd.fdf":{"source":"iana","extensions":["fdf"]},"application/vnd.fdsn.mseed":{"source":"iana","extensions":["mseed"]},"application/vnd.fdsn.seed":{"source":"iana","extensions":["seed","dataless"]},"application/vnd.ffsns":{"source":"iana"},"application/vnd.ficlab.flb+zip":{"source":"iana","compressible":false},"application/vnd.filmit.zfc":{"source":"iana"},"application/vnd.fints":{"source":"iana"},"application/vnd.firemonkeys.cloudcell":{"source":"iana"},"application/vnd.flographit":{"source":"iana","extensions":["gph"]},"application/vnd.fluxtime.clip":{"source":"iana","extensions":["ftc"]},"application/vnd.font-fontforge-sfd":{"source":"iana"},"application/vnd.framemaker":{"source":"iana","extensions":["fm","frame","maker","book"]},"application/vnd.frogans.fnc":{"source":"iana","extensions":["fnc"]},"application/vnd.frogans.ltf":{"source":"iana","extensions":["ltf"]},"application/vnd.fsc.weblaunch":{"source":"iana","extensions":["fsc"]},"application/vnd.fujitsu.oasys":{"source":"iana","extensions":["oas"]},"application/vnd.fujitsu.oasys2":{"source":"iana","extensions":["oa2"]},"application/vnd.fujitsu.oasys3":{"source":"iana","extensions":["oa3"]},"application/vnd.fujitsu.oasysgp":{"source":"iana","extensions":["fg5"]},"application/vnd.fujitsu.oasysprs":{"source":"iana","extensions":["bh2"]},"application/vnd.fujixerox.art-ex":{"source":"iana"},"application/vnd.fujixerox.art4":{"source":"iana"},"application/vnd.fujixerox.ddd":{"source":"iana","extensions":["ddd"]},"application/vnd.fujixerox.docuworks":{"source":"iana","extensions":["xdw"]},"application/vnd.fujixerox.docuworks.binder":{"source":"iana","extensions":["xbd"]},"application/vnd.fujixerox.docuworks.container":{"source":"iana"},"application/vnd.fujixerox.hbpl":{"source":"iana"},"application/vnd.fut-misnet":{"source":"iana"},"application/vnd.futoin+cbor":{"source":"iana"},"application/vnd.futoin+json":{"source":"iana","compressible":true},"application/vnd.fuzzysheet":{"source":"iana","extensions":["fzs"]},"application/vnd.genomatix.tuxedo":{"source":"iana","extensions":["txd"]},"application/vnd.gentics.grd+json":{"source":"iana","compressible":true},"application/vnd.geo+json":{"source":"iana","compressible":true},"application/vnd.geocube+xml":{"source":"iana","compressible":true},"application/vnd.geogebra.file":{"source":"iana","extensions":["ggb"]},"application/vnd.geogebra.tool":{"source":"iana","extensions":["ggt"]},"application/vnd.geometry-explorer":{"source":"iana","extensions":["gex","gre"]},"application/vnd.geonext":{"source":"iana","extensions":["gxt"]},"application/vnd.geoplan":{"source":"iana","extensions":["g2w"]},"application/vnd.geospace":{"source":"iana","extensions":["g3w"]},"application/vnd.gerber":{"source":"iana"},"application
/vnd.globalplatform.card-content-mgt":{"source":"iana"},"application/vnd.globalplatform.card-content-mgt-response":{"source":"iana"},"application/vnd.gmx":{"source":"iana","extensions":["gmx"]},"application/vnd.google-apps.document":{"compressible":false,"extensions":["gdoc"]},"application/vnd.google-apps.presentation":{"compressible":false,"extensions":["gslides"]},"application/vnd.google-apps.spreadsheet":{"compressible":false,"extensions":["gsheet"]},"application/vnd.google-earth.kml+xml":{"source":"iana","compressible":true,"extensions":["kml"]},"application/vnd.google-earth.kmz":{"source":"iana","compressible":false,"extensions":["kmz"]},"application/vnd.gov.sk.e-form+xml":{"source":"iana","compressible":true},"application/vnd.gov.sk.e-form+zip":{"source":"iana","compressible":false},"application/vnd.gov.sk.xmldatacontainer+xml":{"source":"iana","compressible":true},"application/vnd.grafeq":{"source":"iana","extensions":["gqf","gqs"]},"application/vnd.gridmp":{"source":"iana"},"application/vnd.groove-account":{"source":"iana","extensions":["gac"]},"application/vnd.groove-help":{"source":"iana","extensions":["ghf"]},"application/vnd.groove-identity-message":{"source":"iana","extensions":["gim"]},"application/vnd.groove-injector":{"source":"iana","extensions":["grv"]},"application/vnd.groove-tool-message":{"source":"iana","extensions":["gtm"]},"application/vnd.groove-tool-template":{"source":"iana","extensions":["tpl"]},"application/vnd.groove-vcard":{"source":"iana","extensions":["vcg"]},"application/vnd.hal+json":{"source":"iana","compressible":true},"application/vnd.hal+xml":{"source":"iana","compressible":true,"extensions":["hal"]},"application/vnd.handheld-entertainment+xml":{"source":"iana","compressible":true,"extensions":["zmm"]},"application/vnd.hbci":{"source":"iana","extensions":["hbci"]},"application/vnd.hc+json":{"source":"iana","compressible":true},"application/vnd.hcl-bireports":{"source":"iana"},"application/vnd.hdt":{"source":"iana"},"application/vnd.heroku+json":{"source":"iana","compressible":true},"application/vnd.hhe.lesson-player":{"source":"iana","extensions":["les"]},"application/vnd.hp-hpgl":{"source":"iana","extensions":["hpgl"]},"application/vnd.hp-hpid":{"source":"iana","extensions":["hpid"]},"application/vnd.hp-hps":{"source":"iana","extensions":["hps"]},"application/vnd.hp-jlyt":{"source":"iana","extensions":["jlt"]},"application/vnd.hp-pcl":{"source":"iana","extensions":["pcl"]},"application/vnd.hp-pclxl":{"source":"iana","extensions":["pclxl"]},"application/vnd.httphone":{"source":"iana"},"application/vnd.hydrostatix.sof-data":{"source":"iana","extensions":["sfd-hdstx"]},"application/vnd.hyper+json":{"source":"iana","compressible":true},"application/vnd.hyper-item+json":{"source":"iana","compressible":true},"application/vnd.hyperdrive+json":{"source":"iana","compressible":true},"application/vnd.hzn-3d-crossword":{"source":"iana"},"application/vnd.ibm.afplinedata":{"source":"iana"},"application/vnd.ibm.electronic-media":{"source":"iana"},"application/vnd.ibm.minipay":{"source":"iana","extensions":["mpy"]},"application/vnd.ibm.modcap":{"source":"iana","extensions":["afp","listafp","list3820"]},"application/vnd.ibm.rights-management":{"source":"iana","extensions":["irm"]},"application/vnd.ibm.secure-container":{"source":"iana","extensions":["sc"]},"application/vnd.iccprofile":{"source":"iana","extensions":["icc","icm"]},"application/vnd.ieee.1905":{"source":"iana"},"application/vnd.igloader":{"source":"iana","extensions":["igl"]},"application/vnd.imagemeter.f
older+zip":{"source":"iana","compressible":false},"application/vnd.imagemeter.image+zip":{"source":"iana","compressible":false},"application/vnd.immervision-ivp":{"source":"iana","extensions":["ivp"]},"application/vnd.immervision-ivu":{"source":"iana","extensions":["ivu"]},"application/vnd.ims.imsccv1p1":{"source":"iana"},"application/vnd.ims.imsccv1p2":{"source":"iana"},"application/vnd.ims.imsccv1p3":{"source":"iana"},"application/vnd.ims.lis.v2.result+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolconsumerprofile+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolproxy+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolproxy.id+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolsettings+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolsettings.simple+json":{"source":"iana","compressible":true},"application/vnd.informedcontrol.rms+xml":{"source":"iana","compressible":true},"application/vnd.informix-visionary":{"source":"iana"},"application/vnd.infotech.project":{"source":"iana"},"application/vnd.infotech.project+xml":{"source":"iana","compressible":true},"application/vnd.innopath.wamp.notification":{"source":"iana"},"application/vnd.insors.igm":{"source":"iana","extensions":["igm"]},"application/vnd.intercon.formnet":{"source":"iana","extensions":["xpw","xpx"]},"application/vnd.intergeo":{"source":"iana","extensions":["i2g"]},"application/vnd.intertrust.digibox":{"source":"iana"},"application/vnd.intertrust.nncp":{"source":"iana"},"application/vnd.intu.qbo":{"source":"iana","extensions":["qbo"]},"application/vnd.intu.qfx":{"source":"iana","extensions":["qfx"]},"application/vnd.iptc.g2.catalogitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.conceptitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.knowledgeitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.newsitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.newsmessage+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.packageitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.planningitem+xml":{"source":"iana","compressible":true},"application/vnd.ipunplugged.rcprofile":{"source":"iana","extensions":["rcprofile"]},"application/vnd.irepository.package+xml":{"source":"iana","compressible":true,"extensions":["irp"]},"application/vnd.is-xpr":{"source":"iana","extensions":["xpr"]},"application/vnd.isac.fcs":{"source":"iana","extensions":["fcs"]},"application/vnd.iso11783-10+zip":{"source":"iana","compressible":false},"application/vnd.jam":{"source":"iana","extensions":["jam"]},"application/vnd.japannet-directory-service":{"source":"iana"},"application/vnd.japannet-jpnstore-wakeup":{"source":"iana"},"application/vnd.japannet-payment-wakeup":{"source":"iana"},"application/vnd.japannet-registration":{"source":"iana"},"application/vnd.japannet-registration-wakeup":{"source":"iana"},"application/vnd.japannet-setstore-wakeup":{"source":"iana"},"application/vnd.japannet-verification":{"source":"iana"},"application/vnd.japannet-verification-wakeup":{"source":"iana"},"application/vnd.jcp.javame.midlet-rms":{"source":"iana","extensions":["rms"]},"application/vnd.jisp":{"source":"iana","extensions":["jisp"]},"application/vnd.joost.joda-archive":{"source":"iana","extensions":["joda"]},"application/vnd.jsk.isdn-ngn":{"source":"iana"},"application/vnd.kahootz":{"source":"iana","extensions":["ktz","kt
r"]},"application/vnd.kde.karbon":{"source":"iana","extensions":["karbon"]},"application/vnd.kde.kchart":{"source":"iana","extensions":["chrt"]},"application/vnd.kde.kformula":{"source":"iana","extensions":["kfo"]},"application/vnd.kde.kivio":{"source":"iana","extensions":["flw"]},"application/vnd.kde.kontour":{"source":"iana","extensions":["kon"]},"application/vnd.kde.kpresenter":{"source":"iana","extensions":["kpr","kpt"]},"application/vnd.kde.kspread":{"source":"iana","extensions":["ksp"]},"application/vnd.kde.kword":{"source":"iana","extensions":["kwd","kwt"]},"application/vnd.kenameaapp":{"source":"iana","extensions":["htke"]},"application/vnd.kidspiration":{"source":"iana","extensions":["kia"]},"application/vnd.kinar":{"source":"iana","extensions":["kne","knp"]},"application/vnd.koan":{"source":"iana","extensions":["skp","skd","skt","skm"]},"application/vnd.kodak-descriptor":{"source":"iana","extensions":["sse"]},"application/vnd.las":{"source":"iana"},"application/vnd.las.las+json":{"source":"iana","compressible":true},"application/vnd.las.las+xml":{"source":"iana","compressible":true,"extensions":["lasxml"]},"application/vnd.laszip":{"source":"iana"},"application/vnd.leap+json":{"source":"iana","compressible":true},"application/vnd.liberty-request+xml":{"source":"iana","compressible":true},"application/vnd.llamagraphics.life-balance.desktop":{"source":"iana","extensions":["lbd"]},"application/vnd.llamagraphics.life-balance.exchange+xml":{"source":"iana","compressible":true,"extensions":["lbe"]},"application/vnd.logipipe.circuit+zip":{"source":"iana","compressible":false},"application/vnd.loom":{"source":"iana"},"application/vnd.lotus-1-2-3":{"source":"iana","extensions":["123"]},"application/vnd.lotus-approach":{"source":"iana","extensions":["apr"]},"application/vnd.lotus-freelance":{"source":"iana","extensions":["pre"]},"application/vnd.lotus-notes":{"source":"iana","extensions":["nsf"]},"application/vnd.lotus-organizer":{"source":"iana","extensions":["org"]},"application/vnd.lotus-screencam":{"source":"iana","extensions":["scm"]},"application/vnd.lotus-wordpro":{"source":"iana","extensions":["lwp"]},"application/vnd.macports.portpkg":{"source":"iana","extensions":["portpkg"]},"application/vnd.mapbox-vector-tile":{"source":"iana"},"application/vnd.marlin.drm.actiontoken+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.conftoken+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.license+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.mdcf":{"source":"iana"},"application/vnd.mason+json":{"source":"iana","compressible":true},"application/vnd.maxmind.maxmind-db":{"source":"iana"},"application/vnd.mcd":{"source":"iana","extensions":["mcd"]},"application/vnd.medcalcdata":{"source":"iana","extensions":["mc1"]},"application/vnd.mediastation.cdkey":{"source":"iana","extensions":["cdkey"]},"application/vnd.meridian-slingshot":{"source":"iana"},"application/vnd.mfer":{"source":"iana","extensions":["mwf"]},"application/vnd.mfmp":{"source":"iana","extensions":["mfm"]},"application/vnd.micro+json":{"source":"iana","compressible":true},"application/vnd.micrografx.flo":{"source":"iana","extensions":["flo"]},"application/vnd.micrografx.igx":{"source":"iana","extensions":["igx"]},"application/vnd.microsoft.portable-executable":{"source":"iana"},"application/vnd.microsoft.windows.thumbnail-cache":{"source":"iana"},"application/vnd.miele+json":{"source":"iana","compressible":true},"application/vnd.mif":{"source":"iana","extensions":["mif
"]},"application/vnd.minisoft-hp3000-save":{"source":"iana"},"application/vnd.mitsubishi.misty-guard.trustweb":{"source":"iana"},"application/vnd.mobius.daf":{"source":"iana","extensions":["daf"]},"application/vnd.mobius.dis":{"source":"iana","extensions":["dis"]},"application/vnd.mobius.mbk":{"source":"iana","extensions":["mbk"]},"application/vnd.mobius.mqy":{"source":"iana","extensions":["mqy"]},"application/vnd.mobius.msl":{"source":"iana","extensions":["msl"]},"application/vnd.mobius.plc":{"source":"iana","extensions":["plc"]},"application/vnd.mobius.txf":{"source":"iana","extensions":["txf"]},"application/vnd.mophun.application":{"source":"iana","extensions":["mpn"]},"application/vnd.mophun.certificate":{"source":"iana","extensions":["mpc"]},"application/vnd.motorola.flexsuite":{"source":"iana"},"application/vnd.motorola.flexsuite.adsi":{"source":"iana"},"application/vnd.motorola.flexsuite.fis":{"source":"iana"},"application/vnd.motorola.flexsuite.gotap":{"source":"iana"},"application/vnd.motorola.flexsuite.kmr":{"source":"iana"},"application/vnd.motorola.flexsuite.ttc":{"source":"iana"},"application/vnd.motorola.flexsuite.wem":{"source":"iana"},"application/vnd.motorola.iprm":{"source":"iana"},"application/vnd.mozilla.xul+xml":{"source":"iana","compressible":true,"extensions":["xul"]},"application/vnd.ms-3mfdocument":{"source":"iana"},"application/vnd.ms-artgalry":{"source":"iana","extensions":["cil"]},"application/vnd.ms-asf":{"source":"iana"},"application/vnd.ms-cab-compressed":{"source":"iana","extensions":["cab"]},"application/vnd.ms-color.iccprofile":{"source":"apache"},"application/vnd.ms-excel":{"source":"iana","compressible":false,"extensions":["xls","xlm","xla","xlc","xlt","xlw"]},"application/vnd.ms-excel.addin.macroenabled.12":{"source":"iana","extensions":["xlam"]},"application/vnd.ms-excel.sheet.binary.macroenabled.12":{"source":"iana","extensions":["xlsb"]},"application/vnd.ms-excel.sheet.macroenabled.12":{"source":"iana","extensions":["xlsm"]},"application/vnd.ms-excel.template.macroenabled.12":{"source":"iana","extensions":["xltm"]},"application/vnd.ms-fontobject":{"source":"iana","compressible":true,"extensions":["eot"]},"application/vnd.ms-htmlhelp":{"source":"iana","extensions":["chm"]},"application/vnd.ms-ims":{"source":"iana","extensions":["ims"]},"application/vnd.ms-lrm":{"source":"iana","extensions":["lrm"]},"application/vnd.ms-office.activex+xml":{"source":"iana","compressible":true},"application/vnd.ms-officetheme":{"source":"iana","extensions":["thmx"]},"application/vnd.ms-opentype":{"source":"apache","compressible":true},"application/vnd.ms-outlook":{"compressible":false,"extensions":["msg"]},"application/vnd.ms-package.obfuscated-opentype":{"source":"apache"},"application/vnd.ms-pki.seccat":{"source":"apache","extensions":["cat"]},"application/vnd.ms-pki.stl":{"source":"apache","extensions":["stl"]},"application/vnd.ms-playready.initiator+xml":{"source":"iana","compressible":true},"application/vnd.ms-powerpoint":{"source":"iana","compressible":false,"extensions":["ppt","pps","pot"]},"application/vnd.ms-powerpoint.addin.macroenabled.12":{"source":"iana","extensions":["ppam"]},"application/vnd.ms-powerpoint.presentation.macroenabled.12":{"source":"iana","extensions":["pptm"]},"application/vnd.ms-powerpoint.slide.macroenabled.12":{"source":"iana","extensions":["sldm"]},"application/vnd.ms-powerpoint.slideshow.macroenabled.12":{"source":"iana","extensions":["ppsm"]},"application/vnd.ms-powerpoint.template.macroenabled.12":{"source":"iana","extensions":["potm"]}
,"application/vnd.ms-printdevicecapabilities+xml":{"source":"iana","compressible":true},"application/vnd.ms-printing.printticket+xml":{"source":"apache","compressible":true},"application/vnd.ms-printschematicket+xml":{"source":"iana","compressible":true},"application/vnd.ms-project":{"source":"iana","extensions":["mpp","mpt"]},"application/vnd.ms-tnef":{"source":"iana"},"application/vnd.ms-windows.devicepairing":{"source":"iana"},"application/vnd.ms-windows.nwprinting.oob":{"source":"iana"},"application/vnd.ms-windows.printerpairing":{"source":"iana"},"application/vnd.ms-windows.wsd.oob":{"source":"iana"},"application/vnd.ms-wmdrm.lic-chlg-req":{"source":"iana"},"application/vnd.ms-wmdrm.lic-resp":{"source":"iana"},"application/vnd.ms-wmdrm.meter-chlg-req":{"source":"iana"},"application/vnd.ms-wmdrm.meter-resp":{"source":"iana"},"application/vnd.ms-word.document.macroenabled.12":{"source":"iana","extensions":["docm"]},"application/vnd.ms-word.template.macroenabled.12":{"source":"iana","extensions":["dotm"]},"application/vnd.ms-works":{"source":"iana","extensions":["wps","wks","wcm","wdb"]},"application/vnd.ms-wpl":{"source":"iana","extensions":["wpl"]},"application/vnd.ms-xpsdocument":{"source":"iana","compressible":false,"extensions":["xps"]},"application/vnd.msa-disk-image":{"source":"iana"},"application/vnd.mseq":{"source":"iana","extensions":["mseq"]},"application/vnd.msign":{"source":"iana"},"application/vnd.multiad.creator":{"source":"iana"},"application/vnd.multiad.creator.cif":{"source":"iana"},"application/vnd.music-niff":{"source":"iana"},"application/vnd.musician":{"source":"iana","extensions":["mus"]},"application/vnd.muvee.style":{"source":"iana","extensions":["msty"]},"application/vnd.mynfc":{"source":"iana","extensions":["taglet"]},"application/vnd.ncd.control":{"source":"iana"},"application/vnd.ncd.reference":{"source":"iana"},"application/vnd.nearst.inv+json":{"source":"iana","compressible":true},"application/vnd.nervana":{"source":"iana"},"application/vnd.netfpx":{"source":"iana"},"application/vnd.neurolanguage.nlu":{"source":"iana","extensions":["nlu"]},"application/vnd.nimn":{"source":"iana"},"application/vnd.nintendo.nitro.rom":{"source":"iana"},"application/vnd.nintendo.snes.rom":{"source":"iana"},"application/vnd.nitf":{"source":"iana","extensions":["ntf","nitf"]},"application/vnd.noblenet-directory":{"source":"iana","extensions":["nnd"]},"application/vnd.noblenet-sealer":{"source":"iana","extensions":["nns"]},"application/vnd.noblenet-web":{"source":"iana","extensions":["nnw"]},"application/vnd.nokia.catalogs":{"source":"iana"},"application/vnd.nokia.conml+wbxml":{"source":"iana"},"application/vnd.nokia.conml+xml":{"source":"iana","compressible":true},"application/vnd.nokia.iptv.config+xml":{"source":"iana","compressible":true},"application/vnd.nokia.isds-radio-presets":{"source":"iana"},"application/vnd.nokia.landmark+wbxml":{"source":"iana"},"application/vnd.nokia.landmark+xml":{"source":"iana","compressible":true},"application/vnd.nokia.landmarkcollection+xml":{"source":"iana","compressible":true},"application/vnd.nokia.n-gage.ac+xml":{"source":"iana","compressible":true,"extensions":["ac"]},"application/vnd.nokia.n-gage.data":{"source":"iana","extensions":["ngdat"]},"application/vnd.nokia.n-gage.symbian.install":{"source":"iana","extensions":["n-gage"]},"application/vnd.nokia.ncd":{"source":"iana"},"application/vnd.nokia.pcd+wbxml":{"source":"iana"},"application/vnd.nokia.pcd+xml":{"source":"iana","compressible":true},"application/vnd.nokia.radio-preset":{"source
":"iana","extensions":["rpst"]},"application/vnd.nokia.radio-presets":{"source":"iana","extensions":["rpss"]},"application/vnd.novadigm.edm":{"source":"iana","extensions":["edm"]},"application/vnd.novadigm.edx":{"source":"iana","extensions":["edx"]},"application/vnd.novadigm.ext":{"source":"iana","extensions":["ext"]},"application/vnd.ntt-local.content-share":{"source":"iana"},"application/vnd.ntt-local.file-transfer":{"source":"iana"},"application/vnd.ntt-local.ogw_remote-access":{"source":"iana"},"application/vnd.ntt-local.sip-ta_remote":{"source":"iana"},"application/vnd.ntt-local.sip-ta_tcp_stream":{"source":"iana"},"application/vnd.oasis.opendocument.chart":{"source":"iana","extensions":["odc"]},"application/vnd.oasis.opendocument.chart-template":{"source":"iana","extensions":["otc"]},"application/vnd.oasis.opendocument.database":{"source":"iana","extensions":["odb"]},"application/vnd.oasis.opendocument.formula":{"source":"iana","extensions":["odf"]},"application/vnd.oasis.opendocument.formula-template":{"source":"iana","extensions":["odft"]},"application/vnd.oasis.opendocument.graphics":{"source":"iana","compressible":false,"extensions":["odg"]},"application/vnd.oasis.opendocument.graphics-template":{"source":"iana","extensions":["otg"]},"application/vnd.oasis.opendocument.image":{"source":"iana","extensions":["odi"]},"application/vnd.oasis.opendocument.image-template":{"source":"iana","extensions":["oti"]},"application/vnd.oasis.opendocument.presentation":{"source":"iana","compressible":false,"extensions":["odp"]},"application/vnd.oasis.opendocument.presentation-template":{"source":"iana","extensions":["otp"]},"application/vnd.oasis.opendocument.spreadsheet":{"source":"iana","compressible":false,"extensions":["ods"]},"application/vnd.oasis.opendocument.spreadsheet-template":{"source":"iana","extensions":["ots"]},"application/vnd.oasis.opendocument.text":{"source":"iana","compressible":false,"extensions":["odt"]},"application/vnd.oasis.opendocument.text-master":{"source":"iana","extensions":["odm"]},"application/vnd.oasis.opendocument.text-template":{"source":"iana","extensions":["ott"]},"application/vnd.oasis.opendocument.text-web":{"source":"iana","extensions":["oth"]},"application/vnd.obn":{"source":"iana"},"application/vnd.ocf+cbor":{"source":"iana"},"application/vnd.oftn.l10n+json":{"source":"iana","compressible":true},"application/vnd.oipf.contentaccessdownload+xml":{"source":"iana","compressible":true},"application/vnd.oipf.contentaccessstreaming+xml":{"source":"iana","compressible":true},"application/vnd.oipf.cspg-hexbinary":{"source":"iana"},"application/vnd.oipf.dae.svg+xml":{"source":"iana","compressible":true},"application/vnd.oipf.dae.xhtml+xml":{"source":"iana","compressible":true},"application/vnd.oipf.mippvcontrolmessage+xml":{"source":"iana","compressible":true},"application/vnd.oipf.pae.gem":{"source":"iana"},"application/vnd.oipf.spdiscovery+xml":{"source":"iana","compressible":true},"application/vnd.oipf.spdlist+xml":{"source":"iana","compressible":true},"application/vnd.oipf.ueprofile+xml":{"source":"iana","compressible":true},"application/vnd.oipf.userprofile+xml":{"source":"iana","compressible":true},"application/vnd.olpc-sugar":{"source":"iana","extensions":["xo"]},"application/vnd.oma-scws-config":{"source":"iana"},"application/vnd.oma-scws-http-request":{"source":"iana"},"application/vnd.oma-scws-http-response":{"source":"iana"},"application/vnd.oma.bcast.associated-procedure-parameter+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.drm-
trigger+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.imd+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.ltkm":{"source":"iana"},"application/vnd.oma.bcast.notification+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.provisioningtrigger":{"source":"iana"},"application/vnd.oma.bcast.sgboot":{"source":"iana"},"application/vnd.oma.bcast.sgdd+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.sgdu":{"source":"iana"},"application/vnd.oma.bcast.simple-symbol-container":{"source":"iana"},"application/vnd.oma.bcast.smartcard-trigger+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.sprov+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.stkm":{"source":"iana"},"application/vnd.oma.cab-address-book+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-feature-handler+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-pcc+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-subs-invite+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-user-prefs+xml":{"source":"iana","compressible":true},"application/vnd.oma.dcd":{"source":"iana"},"application/vnd.oma.dcdc":{"source":"iana"},"application/vnd.oma.dd2+xml":{"source":"iana","compressible":true,"extensions":["dd2"]},"application/vnd.oma.drm.risd+xml":{"source":"iana","compressible":true},"application/vnd.oma.group-usage-list+xml":{"source":"iana","compressible":true},"application/vnd.oma.lwm2m+json":{"source":"iana","compressible":true},"application/vnd.oma.lwm2m+tlv":{"source":"iana"},"application/vnd.oma.pal+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.detailed-progress-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.final-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.groups+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.invocation-descriptor+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.optimized-progress-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.push":{"source":"iana"},"application/vnd.oma.scidm.messages+xml":{"source":"iana","compressible":true},"application/vnd.oma.xcap-directory+xml":{"source":"iana","compressible":true},"application/vnd.omads-email+xml":{"source":"iana","compressible":true},"application/vnd.omads-file+xml":{"source":"iana","compressible":true},"application/vnd.omads-folder+xml":{"source":"iana","compressible":true},"application/vnd.omaloc-supl-init":{"source":"iana"},"application/vnd.onepager":{"source":"iana"},"application/vnd.onepagertamp":{"source":"iana"},"application/vnd.onepagertamx":{"source":"iana"},"application/vnd.onepagertat":{"source":"iana"},"application/vnd.onepagertatp":{"source":"iana"},"application/vnd.onepagertatx":{"source":"iana"},"application/vnd.openblox.game+xml":{"source":"iana","compressible":true,"extensions":["obgx"]},"application/vnd.openblox.game-binary":{"source":"iana"},"application/vnd.openeye.oeb":{"source":"iana"},"application/vnd.openofficeorg.extension":{"source":"apache","extensions":["oxt"]},"application/vnd.openstreetmap.data+xml":{"source":"iana","compressible":true,"extensions":["osm"]},"application/vnd.openxmlformats-officedocument.custom-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.customxmlproperties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawin
g+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.chart+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.extended-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.presentation":{"source":"iana","compressible":false,"extensions":["pptx"]},"application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.presprops+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slide":{"source":"iana","extensions":["sldx"]},"application/vnd.openxmlformats-officedocument.presentationml.slide+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slideshow":{"source":"iana","extensions":["ppsx"]},"application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.tags+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.template":{"source":"iana","extensions":["potx"]},"application/vnd.openxmlformats-officedocument.presentationml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocumen
t.spreadsheetml.connections+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":{"source":"iana","compressible":false,"extensions":["xlsx"]},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.template":{"source":"iana","extensions":["xltx"]},"application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.theme+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.themeoverride+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.vmldrawing":{"source":"iana"},"application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.document":{"source":"iana","compressible":false,"extensions":["docx"]},"application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml":{"source":"iana","compressible":true},"application/
vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.template":{"source":"iana","extensions":["dotx"]},"application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.core-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.relationships+xml":{"source":"iana","compressible":true},"application/vnd.oracle.resource+json":{"source":"iana","compressible":true},"application/vnd.orange.indata":{"source":"iana"},"application/vnd.osa.netdeploy":{"source":"iana"},"application/vnd.osgeo.mapguide.package":{"source":"iana","extensions":["mgp"]},"application/vnd.osgi.bundle":{"source":"iana"},"application/vnd.osgi.dp":{"source":"iana","extensions":["dp"]},"application/vnd.osgi.subsystem":{"source":"iana","extensions":["esa"]},"application/vnd.otps.ct-kip+xml":{"source":"iana","compressible":true},"application/vnd.oxli.countgraph":{"source":"iana"},"application/vnd.pagerduty+json":{"source":"iana","compressible":true},"application/vnd.palm":{"source":"iana","extensions":["pdb","pqa","oprc"]},"application/vnd.panoply":{"source":"iana"},"application/vnd.paos.xml":{"source":"iana"},"application/vnd.patentdive":{"source":"iana"},"application/vnd.patientecommsdoc":{"source":"iana"},"application/vnd.pawaafile":{"source":"iana","extensions":["paw"]},"application/vnd.pcos":{"source":"iana"},"application/vnd.pg.format":{"source":"iana","extensions":["str"]},"application/vnd.pg.osasli":{"source":"iana","extensions":["ei6"]},"application/vnd.piaccess.application-licence":{"source":"iana"},"application/vnd.picsel":{"source":"iana","extensions":["efif"]},"application/vnd.pmi.widget":{"source":"iana","extensions":["wg"]},"application/vnd.poc.group-advertisement+xml":{"source":"iana","compressible":true},"application/vnd.pocketlearn":{"source":"iana","extensions":["plf"]},"application/vnd.powerbuilder6":{"source":"iana","extensions":["pbd"]},"application/vnd.powerbuilder6-s":{"source":"iana"},"application/vnd.powerbuilder7":{"source":"iana"},"application/vnd.powerbuilder7-s":{"source":"iana"},"application/vnd.powerbuilder75":{"source":"iana"},"application/vnd.powerbuilder75-s":{"source":"iana"},"application/vnd.preminet":{"source":"iana"},"application/vnd.previewsystems.box":{"source":"iana","extensions":["box"]},"application/vnd.proteus.magazine":{"source":"iana","extensions":["mgz"]},"application/vnd.psfs":{"source":"iana"},"application/vnd.publishare-delta-tree":{"source":"iana","extensions":["qps"]},"application/vnd.pvi.ptid1":{"source":"iana","extensions":["ptid"]},"application/vnd.pwg-multiplexed":{"source":"iana"},"application/vnd.pwg-xhtml-print+xml":{"source":"iana","compressible":true},"application/vnd.qualcomm.brew-app-res":{"source":"iana"},"application/vnd.quarantainenet":{"source":"iana"}
,"application/vnd.quark.quarkxpress":{"source":"iana","extensions":["qxd","qxt","qwd","qwt","qxl","qxb"]},"application/vnd.quobject-quoxdocument":{"source":"iana"},"application/vnd.radisys.moml+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-conf+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-conn+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-dialog+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-stream+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-conf+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-base+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-fax-detect+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-fax-sendrecv+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-group+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-speech+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-transform+xml":{"source":"iana","compressible":true},"application/vnd.rainstor.data":{"source":"iana"},"application/vnd.rapid":{"source":"iana"},"application/vnd.rar":{"source":"iana"},"application/vnd.realvnc.bed":{"source":"iana","extensions":["bed"]},"application/vnd.recordare.musicxml":{"source":"iana","extensions":["mxl"]},"application/vnd.recordare.musicxml+xml":{"source":"iana","compressible":true,"extensions":["musicxml"]},"application/vnd.renlearn.rlprint":{"source":"iana"},"application/vnd.restful+json":{"source":"iana","compressible":true},"application/vnd.rig.cryptonote":{"source":"iana","extensions":["cryptonote"]},"application/vnd.rim.cod":{"source":"apache","extensions":["cod"]},"application/vnd.rn-realmedia":{"source":"apache","extensions":["rm"]},"application/vnd.rn-realmedia-vbr":{"source":"apache","extensions":["rmvb"]},"application/vnd.route66.link66+xml":{"source":"iana","compressible":true,"extensions":["link66"]},"application/vnd.rs-274x":{"source":"iana"},"application/vnd.ruckus.download":{"source":"iana"},"application/vnd.s3sms":{"source":"iana"},"application/vnd.sailingtracker.track":{"source":"iana","extensions":["st"]},"application/vnd.sbm.cid":{"source":"iana"},"application/vnd.sbm.mid2":{"source":"iana"},"application/vnd.scribus":{"source":"iana"},"application/vnd.sealed.3df":{"source":"iana"},"application/vnd.sealed.csf":{"source":"iana"},"application/vnd.sealed.doc":{"source":"iana"},"application/vnd.sealed.eml":{"source":"iana"},"application/vnd.sealed.mht":{"source":"iana"},"application/vnd.sealed.net":{"source":"iana"},"application/vnd.sealed.ppt":{"source":"iana"},"application/vnd.sealed.tiff":{"source":"iana"},"application/vnd.sealed.xls":{"source":"iana"},"application/vnd.sealedmedia.softseal.html":{"source":"iana"},"application/vnd.sealedmedia.softseal.pdf":{"source":"iana"},"application/vnd.seemail":{"source":"iana","extensions":["see"]},"application/vnd.sema":{"source":"iana","extensions":["sema"]},"application/vnd.semd":{"source":"iana","extensions":["semd"]},"application/vnd.semf":{"source":"iana","extensions":["semf"]},"application/vnd.shade-save-file":{"source":"iana"},"application/vnd.shana.informed.formda
ta":{"source":"iana","extensions":["ifm"]},"application/vnd.shana.informed.formtemplate":{"source":"iana","extensions":["itp"]},"application/vnd.shana.informed.interchange":{"source":"iana","extensions":["iif"]},"application/vnd.shana.informed.package":{"source":"iana","extensions":["ipk"]},"application/vnd.shootproof+json":{"source":"iana","compressible":true},"application/vnd.shopkick+json":{"source":"iana","compressible":true},"application/vnd.sigrok.session":{"source":"iana"},"application/vnd.simtech-mindmapper":{"source":"iana","extensions":["twd","twds"]},"application/vnd.siren+json":{"source":"iana","compressible":true},"application/vnd.smaf":{"source":"iana","extensions":["mmf"]},"application/vnd.smart.notebook":{"source":"iana"},"application/vnd.smart.teacher":{"source":"iana","extensions":["teacher"]},"application/vnd.software602.filler.form+xml":{"source":"iana","compressible":true,"extensions":["fo"]},"application/vnd.software602.filler.form-xml-zip":{"source":"iana"},"application/vnd.solent.sdkm+xml":{"source":"iana","compressible":true,"extensions":["sdkm","sdkd"]},"application/vnd.spotfire.dxp":{"source":"iana","extensions":["dxp"]},"application/vnd.spotfire.sfs":{"source":"iana","extensions":["sfs"]},"application/vnd.sqlite3":{"source":"iana"},"application/vnd.sss-cod":{"source":"iana"},"application/vnd.sss-dtf":{"source":"iana"},"application/vnd.sss-ntf":{"source":"iana"},"application/vnd.stardivision.calc":{"source":"apache","extensions":["sdc"]},"application/vnd.stardivision.draw":{"source":"apache","extensions":["sda"]},"application/vnd.stardivision.impress":{"source":"apache","extensions":["sdd"]},"application/vnd.stardivision.math":{"source":"apache","extensions":["smf"]},"application/vnd.stardivision.writer":{"source":"apache","extensions":["sdw","vor"]},"application/vnd.stardivision.writer-global":{"source":"apache","extensions":["sgl"]},"application/vnd.stepmania.package":{"source":"iana","extensions":["smzip"]},"application/vnd.stepmania.stepchart":{"source":"iana","extensions":["sm"]},"application/vnd.street-stream":{"source":"iana"},"application/vnd.sun.wadl+xml":{"source":"iana","compressible":true,"extensions":["wadl"]},"application/vnd.sun.xml.calc":{"source":"apache","extensions":["sxc"]},"application/vnd.sun.xml.calc.template":{"source":"apache","extensions":["stc"]},"application/vnd.sun.xml.draw":{"source":"apache","extensions":["sxd"]},"application/vnd.sun.xml.draw.template":{"source":"apache","extensions":["std"]},"application/vnd.sun.xml.impress":{"source":"apache","extensions":["sxi"]},"application/vnd.sun.xml.impress.template":{"source":"apache","extensions":["sti"]},"application/vnd.sun.xml.math":{"source":"apache","extensions":["sxm"]},"application/vnd.sun.xml.writer":{"source":"apache","extensions":["sxw"]},"application/vnd.sun.xml.writer.global":{"source":"apache","extensions":["sxg"]},"application/vnd.sun.xml.writer.template":{"source":"apache","extensions":["stw"]},"application/vnd.sus-calendar":{"source":"iana","extensions":["sus","susp"]},"application/vnd.svd":{"source":"iana","extensions":["svd"]},"application/vnd.swiftview-ics":{"source":"iana"},"application/vnd.symbian.install":{"source":"apache","extensions":["sis","sisx"]},"application/vnd.syncml+xml":{"source":"iana","compressible":true,"extensions":["xsm"]},"application/vnd.syncml.dm+wbxml":{"source":"iana","extensions":["bdm"]},"application/vnd.syncml.dm+xml":{"source":"iana","compressible":true,"extensions":["xdm"]},"application/vnd.syncml.dm.notification":{"source":"iana"},"applicatio
n/vnd.syncml.dmddf+wbxml":{"source":"iana"},"application/vnd.syncml.dmddf+xml":{"source":"iana","compressible":true,"extensions":["ddf"]},"application/vnd.syncml.dmtnds+wbxml":{"source":"iana"},"application/vnd.syncml.dmtnds+xml":{"source":"iana","compressible":true},"application/vnd.syncml.ds.notification":{"source":"iana"},"application/vnd.tableschema+json":{"source":"iana","compressible":true},"application/vnd.tao.intent-module-archive":{"source":"iana","extensions":["tao"]},"application/vnd.tcpdump.pcap":{"source":"iana","extensions":["pcap","cap","dmp"]},"application/vnd.think-cell.ppttc+json":{"source":"iana","compressible":true},"application/vnd.tmd.mediaflex.api+xml":{"source":"iana","compressible":true},"application/vnd.tml":{"source":"iana"},"application/vnd.tmobile-livetv":{"source":"iana","extensions":["tmo"]},"application/vnd.tri.onesource":{"source":"iana"},"application/vnd.trid.tpt":{"source":"iana","extensions":["tpt"]},"application/vnd.triscape.mxs":{"source":"iana","extensions":["mxs"]},"application/vnd.trueapp":{"source":"iana","extensions":["tra"]},"application/vnd.truedoc":{"source":"iana"},"application/vnd.ubisoft.webplayer":{"source":"iana"},"application/vnd.ufdl":{"source":"iana","extensions":["ufd","ufdl"]},"application/vnd.uiq.theme":{"source":"iana","extensions":["utz"]},"application/vnd.umajin":{"source":"iana","extensions":["umj"]},"application/vnd.unity":{"source":"iana","extensions":["unityweb"]},"application/vnd.uoml+xml":{"source":"iana","compressible":true,"extensions":["uoml"]},"application/vnd.uplanet.alert":{"source":"iana"},"application/vnd.uplanet.alert-wbxml":{"source":"iana"},"application/vnd.uplanet.bearer-choice":{"source":"iana"},"application/vnd.uplanet.bearer-choice-wbxml":{"source":"iana"},"application/vnd.uplanet.cacheop":{"source":"iana"},"application/vnd.uplanet.cacheop-wbxml":{"source":"iana"},"application/vnd.uplanet.channel":{"source":"iana"},"application/vnd.uplanet.channel-wbxml":{"source":"iana"},"application/vnd.uplanet.list":{"source":"iana"},"application/vnd.uplanet.list-wbxml":{"source":"iana"},"application/vnd.uplanet.listcmd":{"source":"iana"},"application/vnd.uplanet.listcmd-wbxml":{"source":"iana"},"application/vnd.uplanet.signal":{"source":"iana"},"application/vnd.uri-map":{"source":"iana"},"application/vnd.valve.source.material":{"source":"iana"},"application/vnd.vcx":{"source":"iana","extensions":["vcx"]},"application/vnd.vd-study":{"source":"iana"},"application/vnd.vectorworks":{"source":"iana"},"application/vnd.vel+json":{"source":"iana","compressible":true},"application/vnd.verimatrix.vcas":{"source":"iana"},"application/vnd.veryant.thin":{"source":"iana"},"application/vnd.ves.encrypted":{"source":"iana"},"application/vnd.vidsoft.vidconference":{"source":"iana"},"application/vnd.visio":{"source":"iana","extensions":["vsd","vst","vss","vsw"]},"application/vnd.visionary":{"source":"iana","extensions":["vis"]},"application/vnd.vividence.scriptfile":{"source":"iana"},"application/vnd.vsf":{"source":"iana","extensions":["vsf"]},"application/vnd.wap.sic":{"source":"iana"},"application/vnd.wap.slc":{"source":"iana"},"application/vnd.wap.wbxml":{"source":"iana","extensions":["wbxml"]},"application/vnd.wap.wmlc":{"source":"iana","extensions":["wmlc"]},"application/vnd.wap.wmlscriptc":{"source":"iana","extensions":["wmlsc"]},"application/vnd.webturbo":{"source":"iana","extensions":["wtb"]},"application/vnd.wfa.p2p":{"source":"iana"},"application/vnd.wfa.wsc":{"source":"iana"},"application/vnd.windows.devicepairing":{"source":"iana"
},"application/vnd.wmc":{"source":"iana"},"application/vnd.wmf.bootstrap":{"source":"iana"},"application/vnd.wolfram.mathematica":{"source":"iana"},"application/vnd.wolfram.mathematica.package":{"source":"iana"},"application/vnd.wolfram.player":{"source":"iana","extensions":["nbp"]},"application/vnd.wordperfect":{"source":"iana","extensions":["wpd"]},"application/vnd.wqd":{"source":"iana","extensions":["wqd"]},"application/vnd.wrq-hp3000-labelled":{"source":"iana"},"application/vnd.wt.stf":{"source":"iana","extensions":["stf"]},"application/vnd.wv.csp+wbxml":{"source":"iana"},"application/vnd.wv.csp+xml":{"source":"iana","compressible":true},"application/vnd.wv.ssp+xml":{"source":"iana","compressible":true},"application/vnd.xacml+json":{"source":"iana","compressible":true},"application/vnd.xara":{"source":"iana","extensions":["xar"]},"application/vnd.xfdl":{"source":"iana","extensions":["xfdl"]},"application/vnd.xfdl.webform":{"source":"iana"},"application/vnd.xmi+xml":{"source":"iana","compressible":true},"application/vnd.xmpie.cpkg":{"source":"iana"},"application/vnd.xmpie.dpkg":{"source":"iana"},"application/vnd.xmpie.plan":{"source":"iana"},"application/vnd.xmpie.ppkg":{"source":"iana"},"application/vnd.xmpie.xlim":{"source":"iana"},"application/vnd.yamaha.hv-dic":{"source":"iana","extensions":["hvd"]},"application/vnd.yamaha.hv-script":{"source":"iana","extensions":["hvs"]},"application/vnd.yamaha.hv-voice":{"source":"iana","extensions":["hvp"]},"application/vnd.yamaha.openscoreformat":{"source":"iana","extensions":["osf"]},"application/vnd.yamaha.openscoreformat.osfpvg+xml":{"source":"iana","compressible":true,"extensions":["osfpvg"]},"application/vnd.yamaha.remote-setup":{"source":"iana"},"application/vnd.yamaha.smaf-audio":{"source":"iana","extensions":["saf"]},"application/vnd.yamaha.smaf-phrase":{"source":"iana","extensions":["spf"]},"application/vnd.yamaha.through-ngn":{"source":"iana"},"application/vnd.yamaha.tunnel-udpencap":{"source":"iana"},"application/vnd.yaoweme":{"source":"iana"},"application/vnd.yellowriver-custom-menu":{"source":"iana","extensions":["cmp"]},"application/vnd.youtube.yt":{"source":"iana"},"application/vnd.zul":{"source":"iana","extensions":["zir","zirz"]},"application/vnd.zzazz.deck+xml":{"source":"iana","compressible":true,"extensions":["zaz"]},"application/voicexml+xml":{"source":"iana","compressible":true,"extensions":["vxml"]},"application/voucher-cms+json":{"source":"iana","compressible":true},"application/vq-rtcpxr":{"source":"iana"},"application/wasm":{"compressible":true,"extensions":["wasm"]},"application/watcherinfo+xml":{"source":"iana","compressible":true},"application/webpush-options+json":{"source":"iana","compressible":true},"application/whoispp-query":{"source":"iana"},"application/whoispp-response":{"source":"iana"},"application/widget":{"source":"iana","extensions":["wgt"]},"application/winhlp":{"source":"apache","extensions":["hlp"]},"application/wita":{"source":"iana"},"application/wordperfect5.1":{"source":"iana"},"application/wsdl+xml":{"source":"iana","compressible":true,"extensions":["wsdl"]},"application/wspolicy+xml":{"source":"iana","compressible":true,"extensions":["wspolicy"]},"application/x-7z-compressed":{"source":"apache","compressible":false,"extensions":["7z"]},"application/x-abiword":{"source":"apache","extensions":["abw"]},"application/x-ace-compressed":{"source":"apache","extensions":["ace"]},"application/x-amf":{"source":"apache"},"application/x-apple-diskimage":{"source":"apache","extensions":["dmg"]},"application/x-
arj":{"compressible":false,"extensions":["arj"]},"application/x-authorware-bin":{"source":"apache","extensions":["aab","x32","u32","vox"]},"application/x-authorware-map":{"source":"apache","extensions":["aam"]},"application/x-authorware-seg":{"source":"apache","extensions":["aas"]},"application/x-bcpio":{"source":"apache","extensions":["bcpio"]},"application/x-bdoc":{"compressible":false,"extensions":["bdoc"]},"application/x-bittorrent":{"source":"apache","extensions":["torrent"]},"application/x-blorb":{"source":"apache","extensions":["blb","blorb"]},"application/x-bzip":{"source":"apache","compressible":false,"extensions":["bz"]},"application/x-bzip2":{"source":"apache","compressible":false,"extensions":["bz2","boz"]},"application/x-cbr":{"source":"apache","extensions":["cbr","cba","cbt","cbz","cb7"]},"application/x-cdlink":{"source":"apache","extensions":["vcd"]},"application/x-cfs-compressed":{"source":"apache","extensions":["cfs"]},"application/x-chat":{"source":"apache","extensions":["chat"]},"application/x-chess-pgn":{"source":"apache","extensions":["pgn"]},"application/x-chrome-extension":{"extensions":["crx"]},"application/x-cocoa":{"source":"nginx","extensions":["cco"]},"application/x-compress":{"source":"apache"},"application/x-conference":{"source":"apache","extensions":["nsc"]},"application/x-cpio":{"source":"apache","extensions":["cpio"]},"application/x-csh":{"source":"apache","extensions":["csh"]},"application/x-deb":{"compressible":false},"application/x-debian-package":{"source":"apache","extensions":["deb","udeb"]},"application/x-dgc-compressed":{"source":"apache","extensions":["dgc"]},"application/x-director":{"source":"apache","extensions":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"]},"application/x-doom":{"source":"apache","extensions":["wad"]},"application/x-dtbncx+xml":{"source":"apache","compressible":true,"extensions":["ncx"]},"application/x-dtbook+xml":{"source":"apache","compressible":true,"extensions":["dtb"]},"application/x-dtbresource+xml":{"source":"apache","compressible":true,"extensions":["res"]},"application/x-dvi":{"source":"apache","compressible":false,"extensions":["dvi"]},"application/x-envoy":{"source":"apache","extensions":["evy"]},"application/x-eva":{"source":"apache","extensions":["eva"]},"application/x-font-bdf":{"source":"apache","extensions":["bdf"]},"application/x-font-dos":{"source":"apache"},"application/x-font-framemaker":{"source":"apache"},"application/x-font-ghostscript":{"source":"apache","extensions":["gsf"]},"application/x-font-libgrx":{"source":"apache"},"application/x-font-linux-psf":{"source":"apache","extensions":["psf"]},"application/x-font-pcf":{"source":"apache","extensions":["pcf"]},"application/x-font-snf":{"source":"apache","extensions":["snf"]},"application/x-font-speedo":{"source":"apache"},"application/x-font-sunos-news":{"source":"apache"},"application/x-font-type1":{"source":"apache","extensions":["pfa","pfb","pfm","afm"]},"application/x-font-vfont":{"source":"apache"},"application/x-freearc":{"source":"apache","extensions":["arc"]},"application/x-futuresplash":{"source":"apache","extensions":["spl"]},"application/x-gca-compressed":{"source":"apache","extensions":["gca"]},"application/x-glulx":{"source":"apache","extensions":["ulx"]},"application/x-gnumeric":{"source":"apache","extensions":["gnumeric"]},"application/x-gramps-xml":{"source":"apache","extensions":["gramps"]},"application/x-gtar":{"source":"apache","extensions":["gtar"]},"application/x-gzip":{"source":"apache"},"application/x-hdf":{"source":"apache
","extensions":["hdf"]},"application/x-httpd-php":{"compressible":true,"extensions":["php"]},"application/x-install-instructions":{"source":"apache","extensions":["install"]},"application/x-iso9660-image":{"source":"apache","extensions":["iso"]},"application/x-java-archive-diff":{"source":"nginx","extensions":["jardiff"]},"application/x-java-jnlp-file":{"source":"apache","compressible":false,"extensions":["jnlp"]},"application/x-javascript":{"compressible":true},"application/x-keepass2":{"extensions":["kdbx"]},"application/x-latex":{"source":"apache","compressible":false,"extensions":["latex"]},"application/x-lua-bytecode":{"extensions":["luac"]},"application/x-lzh-compressed":{"source":"apache","extensions":["lzh","lha"]},"application/x-makeself":{"source":"nginx","extensions":["run"]},"application/x-mie":{"source":"apache","extensions":["mie"]},"application/x-mobipocket-ebook":{"source":"apache","extensions":["prc","mobi"]},"application/x-mpegurl":{"compressible":false},"application/x-ms-application":{"source":"apache","extensions":["application"]},"application/x-ms-shortcut":{"source":"apache","extensions":["lnk"]},"application/x-ms-wmd":{"source":"apache","extensions":["wmd"]},"application/x-ms-wmz":{"source":"apache","extensions":["wmz"]},"application/x-ms-xbap":{"source":"apache","extensions":["xbap"]},"application/x-msaccess":{"source":"apache","extensions":["mdb"]},"application/x-msbinder":{"source":"apache","extensions":["obd"]},"application/x-mscardfile":{"source":"apache","extensions":["crd"]},"application/x-msclip":{"source":"apache","extensions":["clp"]},"application/x-msdos-program":{"extensions":["exe"]},"application/x-msdownload":{"source":"apache","extensions":["exe","dll","com","bat","msi"]},"application/x-msmediaview":{"source":"apache","extensions":["mvb","m13","m14"]},"application/x-msmetafile":{"source":"apache","extensions":["wmf","wmz","emf","emz"]},"application/x-msmoney":{"source":"apache","extensions":["mny"]},"application/x-mspublisher":{"source":"apache","extensions":["pub"]},"application/x-msschedule":{"source":"apache","extensions":["scd"]},"application/x-msterminal":{"source":"apache","extensions":["trm"]},"application/x-mswrite":{"source":"apache","extensions":["wri"]},"application/x-netcdf":{"source":"apache","extensions":["nc","cdf"]},"application/x-ns-proxy-autoconfig":{"compressible":true,"extensions":["pac"]},"application/x-nzb":{"source":"apache","extensions":["nzb"]},"application/x-perl":{"source":"nginx","extensions":["pl","pm"]},"application/x-pilot":{"source":"nginx","extensions":["prc","pdb"]},"application/x-pkcs12":{"source":"apache","compressible":false,"extensions":["p12","pfx"]},"application/x-pkcs7-certificates":{"source":"apache","extensions":["p7b","spc"]},"application/x-pkcs7-certreqresp":{"source":"apache","extensions":["p7r"]},"application/x-rar-compressed":{"source":"apache","compressible":false,"extensions":["rar"]},"application/x-redhat-package-manager":{"source":"nginx","extensions":["rpm"]},"application/x-research-info-systems":{"source":"apache","extensions":["ris"]},"application/x-sea":{"source":"nginx","extensions":["sea"]},"application/x-sh":{"source":"apache","compressible":true,"extensions":["sh"]},"application/x-shar":{"source":"apache","extensions":["shar"]},"application/x-shockwave-flash":{"source":"apache","compressible":false,"extensions":["swf"]},"application/x-silverlight-app":{"source":"apache","extensions":["xap"]},"application/x-sql":{"source":"apache","extensions":["sql"]},"application/x-stuffit":{"source":"apache",
"compressible":false,"extensions":["sit"]},"application/x-stuffitx":{"source":"apache","extensions":["sitx"]},"application/x-subrip":{"source":"apache","extensions":["srt"]},"application/x-sv4cpio":{"source":"apache","extensions":["sv4cpio"]},"application/x-sv4crc":{"source":"apache","extensions":["sv4crc"]},"application/x-t3vm-image":{"source":"apache","extensions":["t3"]},"application/x-tads":{"source":"apache","extensions":["gam"]},"application/x-tar":{"source":"apache","compressible":true,"extensions":["tar"]},"application/x-tcl":{"source":"apache","extensions":["tcl","tk"]},"application/x-tex":{"source":"apache","extensions":["tex"]},"application/x-tex-tfm":{"source":"apache","extensions":["tfm"]},"application/x-texinfo":{"source":"apache","extensions":["texinfo","texi"]},"application/x-tgif":{"source":"apache","extensions":["obj"]},"application/x-ustar":{"source":"apache","extensions":["ustar"]},"application/x-virtualbox-hdd":{"compressible":true,"extensions":["hdd"]},"application/x-virtualbox-ova":{"compressible":true,"extensions":["ova"]},"application/x-virtualbox-ovf":{"compressible":true,"extensions":["ovf"]},"application/x-virtualbox-vbox":{"compressible":true,"extensions":["vbox"]},"application/x-virtualbox-vbox-extpack":{"compressible":false,"extensions":["vbox-extpack"]},"application/x-virtualbox-vdi":{"compressible":true,"extensions":["vdi"]},"application/x-virtualbox-vhd":{"compressible":true,"extensions":["vhd"]},"application/x-virtualbox-vmdk":{"compressible":true,"extensions":["vmdk"]},"application/x-wais-source":{"source":"apache","extensions":["src"]},"application/x-web-app-manifest+json":{"compressible":true,"extensions":["webapp"]},"application/x-www-form-urlencoded":{"source":"iana","compressible":true},"application/x-x509-ca-cert":{"source":"apache","extensions":["der","crt","pem"]},"application/x-xfig":{"source":"apache","extensions":["fig"]},"application/x-xliff+xml":{"source":"apache","compressible":true,"extensions":["xlf"]},"application/x-xpinstall":{"source":"apache","compressible":false,"extensions":["xpi"]},"application/x-xz":{"source":"apache","extensions":["xz"]},"application/x-zmachine":{"source":"apache","extensions":["z1","z2","z3","z4","z5","z6","z7","z8"]},"application/x400-bp":{"source":"iana"},"application/xacml+xml":{"source":"iana","compressible":true},"application/xaml+xml":{"source":"apache","compressible":true,"extensions":["xaml"]},"application/xcap-att+xml":{"source":"iana","compressible":true,"extensions":["xav"]},"application/xcap-caps+xml":{"source":"iana","compressible":true,"extensions":["xca"]},"application/xcap-diff+xml":{"source":"iana","compressible":true,"extensions":["xdf"]},"application/xcap-el+xml":{"source":"iana","compressible":true,"extensions":["xel"]},"application/xcap-error+xml":{"source":"iana","compressible":true,"extensions":["xer"]},"application/xcap-ns+xml":{"source":"iana","compressible":true,"extensions":["xns"]},"application/xcon-conference-info+xml":{"source":"iana","compressible":true},"application/xcon-conference-info-diff+xml":{"source":"iana","compressible":true},"application/xenc+xml":{"source":"iana","compressible":true,"extensions":["xenc"]},"application/xhtml+xml":{"source":"iana","compressible":true,"extensions":["xhtml","xht"]},"application/xhtml-voice+xml":{"source":"apache","compressible":true},"application/xliff+xml":{"source":"iana","compressible":true,"extensions":["xlf"]},"application/xml":{"source":"iana","compressible":true,"extensions":["xml","xsl","xsd","rng"]},"application/xml-dtd":{"source":"i
ana","compressible":true,"extensions":["dtd"]},"application/xml-external-parsed-entity":{"source":"iana"},"application/xml-patch+xml":{"source":"iana","compressible":true},"application/xmpp+xml":{"source":"iana","compressible":true},"application/xop+xml":{"source":"iana","compressible":true,"extensions":["xop"]},"application/xproc+xml":{"source":"apache","compressible":true,"extensions":["xpl"]},"application/xslt+xml":{"source":"iana","compressible":true,"extensions":["xslt"]},"application/xspf+xml":{"source":"apache","compressible":true,"extensions":["xspf"]},"application/xv+xml":{"source":"iana","compressible":true,"extensions":["mxml","xhvml","xvml","xvm"]},"application/yang":{"source":"iana","extensions":["yang"]},"application/yang-data+json":{"source":"iana","compressible":true},"application/yang-data+xml":{"source":"iana","compressible":true},"application/yang-patch+json":{"source":"iana","compressible":true},"application/yang-patch+xml":{"source":"iana","compressible":true},"application/yin+xml":{"source":"iana","compressible":true,"extensions":["yin"]},"application/zip":{"source":"iana","compressible":false,"extensions":["zip"]},"application/zlib":{"source":"iana"},"application/zstd":{"source":"iana"},"audio/1d-interleaved-parityfec":{"source":"iana"},"audio/32kadpcm":{"source":"iana"},"audio/3gpp":{"source":"iana","compressible":false,"extensions":["3gpp"]},"audio/3gpp2":{"source":"iana"},"audio/aac":{"source":"iana"},"audio/ac3":{"source":"iana"},"audio/adpcm":{"source":"apache","extensions":["adp"]},"audio/amr":{"source":"iana"},"audio/amr-wb":{"source":"iana"},"audio/amr-wb+":{"source":"iana"},"audio/aptx":{"source":"iana"},"audio/asc":{"source":"iana"},"audio/atrac-advanced-lossless":{"source":"iana"},"audio/atrac-x":{"source":"iana"},"audio/atrac3":{"source":"iana"},"audio/basic":{"source":"iana","compressible":false,"extensions":["au","snd"]},"audio/bv16":{"source":"iana"},"audio/bv32":{"source":"iana"},"audio/clearmode":{"source":"iana"},"audio/cn":{"source":"iana"},"audio/dat12":{"source":"iana"},"audio/dls":{"source":"iana"},"audio/dsr-es201108":{"source":"iana"},"audio/dsr-es202050":{"source":"iana"},"audio/dsr-es202211":{"source":"iana"},"audio/dsr-es202212":{"source":"iana"},"audio/dv":{"source":"iana"},"audio/dvi4":{"source":"iana"},"audio/eac3":{"source":"iana"},"audio/encaprtp":{"source":"iana"},"audio/evrc":{"source":"iana"},"audio/evrc-qcp":{"source":"iana"},"audio/evrc0":{"source":"iana"},"audio/evrc1":{"source":"iana"},"audio/evrcb":{"source":"iana"},"audio/evrcb0":{"source":"iana"},"audio/evrcb1":{"source":"iana"},"audio/evrcnw":{"source":"iana"},"audio/evrcnw0":{"source":"iana"},"audio/evrcnw1":{"source":"iana"},"audio/evrcwb":{"source":"iana"},"audio/evrcwb0":{"source":"iana"},"audio/evrcwb1":{"source":"iana"},"audio/evs":{"source":"iana"},"audio/flexfec":{"source":"iana"},"audio/fwdred":{"source":"iana"},"audio/g711-0":{"source":"iana"},"audio/g719":{"source":"iana"},"audio/g722":{"source":"iana"},"audio/g7221":{"source":"iana"},"audio/g723":{"source":"iana"},"audio/g726-16":{"source":"iana"},"audio/g726-24":{"source":"iana"},"audio/g726-32":{"source":"iana"},"audio/g726-40":{"source":"iana"},"audio/g728":{"source":"iana"},"audio/g729":{"source":"iana"},"audio/g7291":{"source":"iana"},"audio/g729d":{"source":"iana"},"audio/g729e":{"source":"iana"},"audio/gsm":{"source":"iana"},"audio/gsm-efr":{"source":"iana"},"audio/gsm-hr-08":{"source":"iana"},"audio/ilbc":{"source":"iana"},"audio/ip-mr_v2.5":{"source":"iana"},"audio/isac":{"source":"apache"},"audio/l16":{"
source":"iana"},"audio/l20":{"source":"iana"},"audio/l24":{"source":"iana","compressible":false},"audio/l8":{"source":"iana"},"audio/lpc":{"source":"iana"},"audio/melp":{"source":"iana"},"audio/melp1200":{"source":"iana"},"audio/melp2400":{"source":"iana"},"audio/melp600":{"source":"iana"},"audio/midi":{"source":"apache","extensions":["mid","midi","kar","rmi"]},"audio/mobile-xmf":{"source":"iana","extensions":["mxmf"]},"audio/mp3":{"compressible":false,"extensions":["mp3"]},"audio/mp4":{"source":"iana","compressible":false,"extensions":["m4a","mp4a"]},"audio/mp4a-latm":{"source":"iana"},"audio/mpa":{"source":"iana"},"audio/mpa-robust":{"source":"iana"},"audio/mpeg":{"source":"iana","compressible":false,"extensions":["mpga","mp2","mp2a","mp3","m2a","m3a"]},"audio/mpeg4-generic":{"source":"iana"},"audio/musepack":{"source":"apache"},"audio/ogg":{"source":"iana","compressible":false,"extensions":["oga","ogg","spx"]},"audio/opus":{"source":"iana"},"audio/parityfec":{"source":"iana"},"audio/pcma":{"source":"iana"},"audio/pcma-wb":{"source":"iana"},"audio/pcmu":{"source":"iana"},"audio/pcmu-wb":{"source":"iana"},"audio/prs.sid":{"source":"iana"},"audio/qcelp":{"source":"iana"},"audio/raptorfec":{"source":"iana"},"audio/red":{"source":"iana"},"audio/rtp-enc-aescm128":{"source":"iana"},"audio/rtp-midi":{"source":"iana"},"audio/rtploopback":{"source":"iana"},"audio/rtx":{"source":"iana"},"audio/s3m":{"source":"apache","extensions":["s3m"]},"audio/silk":{"source":"apache","extensions":["sil"]},"audio/smv":{"source":"iana"},"audio/smv-qcp":{"source":"iana"},"audio/smv0":{"source":"iana"},"audio/sp-midi":{"source":"iana"},"audio/speex":{"source":"iana"},"audio/t140c":{"source":"iana"},"audio/t38":{"source":"iana"},"audio/telephone-event":{"source":"iana"},"audio/tetra_acelp":{"source":"iana"},"audio/tone":{"source":"iana"},"audio/uemclip":{"source":"iana"},"audio/ulpfec":{"source":"iana"},"audio/usac":{"source":"iana"},"audio/vdvi":{"source":"iana"},"audio/vmr-wb":{"source":"iana"},"audio/vnd.3gpp.iufp":{"source":"iana"},"audio/vnd.4sb":{"source":"iana"},"audio/vnd.audiokoz":{"source":"iana"},"audio/vnd.celp":{"source":"iana"},"audio/vnd.cisco.nse":{"source":"iana"},"audio/vnd.cmles.radio-events":{"source":"iana"},"audio/vnd.cns.anp1":{"source":"iana"},"audio/vnd.cns.inf1":{"source":"iana"},"audio/vnd.dece.audio":{"source":"iana","extensions":["uva","uvva"]},"audio/vnd.digital-winds":{"source":"iana","extensions":["eol"]},"audio/vnd.dlna.adts":{"source":"iana"},"audio/vnd.dolby.heaac.1":{"source":"iana"},"audio/vnd.dolby.heaac.2":{"source":"iana"},"audio/vnd.dolby.mlp":{"source":"iana"},"audio/vnd.dolby.mps":{"source":"iana"},"audio/vnd.dolby.pl2":{"source":"iana"},"audio/vnd.dolby.pl2x":{"source":"iana"},"audio/vnd.dolby.pl2z":{"source":"iana"},"audio/vnd.dolby.pulse.1":{"source":"iana"},"audio/vnd.dra":{"source":"iana","extensions":["dra"]},"audio/vnd.dts":{"source":"iana","extensions":["dts"]},"audio/vnd.dts.hd":{"source":"iana","extensions":["dtshd"]},"audio/vnd.dts.uhd":{"source":"iana"},"audio/vnd.dvb.file":{"source":"iana"},"audio/vnd.everad.plj":{"source":"iana"},"audio/vnd.hns.audio":{"source":"iana"},"audio/vnd.lucent.voice":{"source":"iana","extensions":["lvp"]},"audio/vnd.ms-playready.media.pya":{"source":"iana","extensions":["pya"]},"audio/vnd.nokia.mobile-xmf":{"source":"iana"},"audio/vnd.nortel.vbk":{"source":"iana"},"audio/vnd.nuera.ecelp4800":{"source":"iana","extensions":["ecelp4800"]},"audio/vnd.nuera.ecelp7470":{"source":"iana","extensions":["ecelp7470"]},"audio/vnd.nuera.ecelp9600"
:{"source":"iana","extensions":["ecelp9600"]},"audio/vnd.octel.sbc":{"source":"iana"},"audio/vnd.presonus.multitrack":{"source":"iana"},"audio/vnd.qcelp":{"source":"iana"},"audio/vnd.rhetorex.32kadpcm":{"source":"iana"},"audio/vnd.rip":{"source":"iana","extensions":["rip"]},"audio/vnd.rn-realaudio":{"compressible":false},"audio/vnd.sealedmedia.softseal.mpeg":{"source":"iana"},"audio/vnd.vmx.cvsd":{"source":"iana"},"audio/vnd.wave":{"compressible":false},"audio/vorbis":{"source":"iana","compressible":false},"audio/vorbis-config":{"source":"iana"},"audio/wav":{"compressible":false,"extensions":["wav"]},"audio/wave":{"compressible":false,"extensions":["wav"]},"audio/webm":{"source":"apache","compressible":false,"extensions":["weba"]},"audio/x-aac":{"source":"apache","compressible":false,"extensions":["aac"]},"audio/x-aiff":{"source":"apache","extensions":["aif","aiff","aifc"]},"audio/x-caf":{"source":"apache","compressible":false,"extensions":["caf"]},"audio/x-flac":{"source":"apache","extensions":["flac"]},"audio/x-m4a":{"source":"nginx","extensions":["m4a"]},"audio/x-matroska":{"source":"apache","extensions":["mka"]},"audio/x-mpegurl":{"source":"apache","extensions":["m3u"]},"audio/x-ms-wax":{"source":"apache","extensions":["wax"]},"audio/x-ms-wma":{"source":"apache","extensions":["wma"]},"audio/x-pn-realaudio":{"source":"apache","extensions":["ram","ra"]},"audio/x-pn-realaudio-plugin":{"source":"apache","extensions":["rmp"]},"audio/x-realaudio":{"source":"nginx","extensions":["ra"]},"audio/x-tta":{"source":"apache"},"audio/x-wav":{"source":"apache","extensions":["wav"]},"audio/xm":{"source":"apache","extensions":["xm"]},"chemical/x-cdx":{"source":"apache","extensions":["cdx"]},"chemical/x-cif":{"source":"apache","extensions":["cif"]},"chemical/x-cmdf":{"source":"apache","extensions":["cmdf"]},"chemical/x-cml":{"source":"apache","extensions":["cml"]},"chemical/x-csml":{"source":"apache","extensions":["csml"]},"chemical/x-pdb":{"source":"apache"},"chemical/x-xyz":{"source":"apache","extensions":["xyz"]},"font/collection":{"source":"iana","extensions":["ttc"]},"font/otf":{"source":"iana","compressible":true,"extensions":["otf"]},"font/sfnt":{"source":"iana"},"font/ttf":{"source":"iana","compressible":true,"extensions":["ttf"]},"font/woff":{"source":"iana","extensions":["woff"]},"font/woff2":{"source":"iana","extensions":["woff2"]},"image/aces":{"source":"iana","extensions":["exr"]},"image/apng":{"compressible":false,"extensions":["apng"]},"image/avci":{"source":"iana"},"image/avcs":{"source":"iana"},"image/bmp":{"source":"iana","compressible":true,"extensions":["bmp"]},"image/cgm":{"source":"iana","extensions":["cgm"]},"image/dicom-rle":{"source":"iana","extensions":["drle"]},"image/emf":{"source":"iana","extensions":["emf"]},"image/fits":{"source":"iana","extensions":["fits"]},"image/g3fax":{"source":"iana","extensions":["g3"]},"image/gif":{"source":"iana","compressible":false,"extensions":["gif"]},"image/heic":{"source":"iana","extensions":["heic"]},"image/heic-sequence":{"source":"iana","extensions":["heics"]},"image/heif":{"source":"iana","extensions":["heif"]},"image/heif-sequence":{"source":"iana","extensions":["heifs"]},"image/hej2k":{"source":"iana","extensions":["hej2"]},"image/hsj2":{"source":"iana","extensions":["hsj2"]},"image/ief":{"source":"iana","extensions":["ief"]},"image/jls":{"source":"iana","extensions":["jls"]},"image/jp2":{"source":"iana","compressible":false,"extensions":["jp2","jpg2"]},"image/jpeg":{"source":"iana","compressible":false,"extensions":["jpeg","jpg","jpe"]}
,"image/jph":{"source":"iana","extensions":["jph"]},"image/jphc":{"source":"iana","extensions":["jhc"]},"image/jpm":{"source":"iana","compressible":false,"extensions":["jpm"]},"image/jpx":{"source":"iana","compressible":false,"extensions":["jpx","jpf"]},"image/jxr":{"source":"iana","extensions":["jxr"]},"image/jxra":{"source":"iana","extensions":["jxra"]},"image/jxrs":{"source":"iana","extensions":["jxrs"]},"image/jxs":{"source":"iana","extensions":["jxs"]},"image/jxsc":{"source":"iana","extensions":["jxsc"]},"image/jxsi":{"source":"iana","extensions":["jxsi"]},"image/jxss":{"source":"iana","extensions":["jxss"]},"image/ktx":{"source":"iana","extensions":["ktx"]},"image/naplps":{"source":"iana"},"image/pjpeg":{"compressible":false},"image/png":{"source":"iana","compressible":false,"extensions":["png"]},"image/prs.btif":{"source":"iana","extensions":["btif"]},"image/prs.pti":{"source":"iana","extensions":["pti"]},"image/pwg-raster":{"source":"iana"},"image/sgi":{"source":"apache","extensions":["sgi"]},"image/svg+xml":{"source":"iana","compressible":true,"extensions":["svg","svgz"]},"image/t38":{"source":"iana","extensions":["t38"]},"image/tiff":{"source":"iana","compressible":false,"extensions":["tif","tiff"]},"image/tiff-fx":{"source":"iana","extensions":["tfx"]},"image/vnd.adobe.photoshop":{"source":"iana","compressible":true,"extensions":["psd"]},"image/vnd.airzip.accelerator.azv":{"source":"iana","extensions":["azv"]},"image/vnd.cns.inf2":{"source":"iana"},"image/vnd.dece.graphic":{"source":"iana","extensions":["uvi","uvvi","uvg","uvvg"]},"image/vnd.djvu":{"source":"iana","extensions":["djvu","djv"]},"image/vnd.dvb.subtitle":{"source":"iana","extensions":["sub"]},"image/vnd.dwg":{"source":"iana","extensions":["dwg"]},"image/vnd.dxf":{"source":"iana","extensions":["dxf"]},"image/vnd.fastbidsheet":{"source":"iana","extensions":["fbs"]},"image/vnd.fpx":{"source":"iana","extensions":["fpx"]},"image/vnd.fst":{"source":"iana","extensions":["fst"]},"image/vnd.fujixerox.edmics-mmr":{"source":"iana","extensions":["mmr"]},"image/vnd.fujixerox.edmics-rlc":{"source":"iana","extensions":["rlc"]},"image/vnd.globalgraphics.pgb":{"source":"iana"},"image/vnd.microsoft.icon":{"source":"iana","extensions":["ico"]},"image/vnd.mix":{"source":"iana"},"image/vnd.mozilla.apng":{"source":"iana"},"image/vnd.ms-dds":{"extensions":["dds"]},"image/vnd.ms-modi":{"source":"iana","extensions":["mdi"]},"image/vnd.ms-photo":{"source":"apache","extensions":["wdp"]},"image/vnd.net-fpx":{"source":"iana","extensions":["npx"]},"image/vnd.radiance":{"source":"iana"},"image/vnd.sealed.png":{"source":"iana"},"image/vnd.sealedmedia.softseal.gif":{"source":"iana"},"image/vnd.sealedmedia.softseal.jpg":{"source":"iana"},"image/vnd.svf":{"source":"iana"},"image/vnd.tencent.tap":{"source":"iana","extensions":["tap"]},"image/vnd.valve.source.texture":{"source":"iana","extensions":["vtf"]},"image/vnd.wap.wbmp":{"source":"iana","extensions":["wbmp"]},"image/vnd.xiff":{"source":"iana","extensions":["xif"]},"image/vnd.zbrush.pcx":{"source":"iana","extensions":["pcx"]},"image/webp":{"source":"apache","extensions":["webp"]},"image/wmf":{"source":"iana","extensions":["wmf"]},"image/x-3ds":{"source":"apache","extensions":["3ds"]},"image/x-cmu-raster":{"source":"apache","extensions":["ras"]},"image/x-cmx":{"source":"apache","extensions":["cmx"]},"image/x-freehand":{"source":"apache","extensions":["fh","fhc","fh4","fh5","fh7"]},"image/x-icon":{"source":"apache","compressible":true,"extensions":["ico"]},"image/x-jng":{"source":"nginx","extensions
":["jng"]},"image/x-mrsid-image":{"source":"apache","extensions":["sid"]},"image/x-ms-bmp":{"source":"nginx","compressible":true,"extensions":["bmp"]},"image/x-pcx":{"source":"apache","extensions":["pcx"]},"image/x-pict":{"source":"apache","extensions":["pic","pct"]},"image/x-portable-anymap":{"source":"apache","extensions":["pnm"]},"image/x-portable-bitmap":{"source":"apache","extensions":["pbm"]},"image/x-portable-graymap":{"source":"apache","extensions":["pgm"]},"image/x-portable-pixmap":{"source":"apache","extensions":["ppm"]},"image/x-rgb":{"source":"apache","extensions":["rgb"]},"image/x-tga":{"source":"apache","extensions":["tga"]},"image/x-xbitmap":{"source":"apache","extensions":["xbm"]},"image/x-xcf":{"compressible":false},"image/x-xpixmap":{"source":"apache","extensions":["xpm"]},"image/x-xwindowdump":{"source":"apache","extensions":["xwd"]},"message/cpim":{"source":"iana"},"message/delivery-status":{"source":"iana"},"message/disposition-notification":{"source":"iana","extensions":["disposition-notification"]},"message/external-body":{"source":"iana"},"message/feedback-report":{"source":"iana"},"message/global":{"source":"iana","extensions":["u8msg"]},"message/global-delivery-status":{"source":"iana","extensions":["u8dsn"]},"message/global-disposition-notification":{"source":"iana","extensions":["u8mdn"]},"message/global-headers":{"source":"iana","extensions":["u8hdr"]},"message/http":{"source":"iana","compressible":false},"message/imdn+xml":{"source":"iana","compressible":true},"message/news":{"source":"iana"},"message/partial":{"source":"iana","compressible":false},"message/rfc822":{"source":"iana","compressible":true,"extensions":["eml","mime"]},"message/s-http":{"source":"iana"},"message/sip":{"source":"iana"},"message/sipfrag":{"source":"iana"},"message/tracking-status":{"source":"iana"},"message/vnd.si.simp":{"source":"iana"},"message/vnd.wfa.wsc":{"source":"iana","extensions":["wsc"]},"model/3mf":{"source":"iana","extensions":["3mf"]},"model/gltf+json":{"source":"iana","compressible":true,"extensions":["gltf"]},"model/gltf-binary":{"source":"iana","compressible":true,"extensions":["glb"]},"model/iges":{"source":"iana","compressible":false,"extensions":["igs","iges"]},"model/mesh":{"source":"iana","compressible":false,"extensions":["msh","mesh","silo"]},"model/stl":{"source":"iana","extensions":["stl"]},"model/vnd.collada+xml":{"source":"iana","compressible":true,"extensions":["dae"]},"model/vnd.dwf":{"source":"iana","extensions":["dwf"]},"model/vnd.flatland.3dml":{"source":"iana"},"model/vnd.gdl":{"source":"iana","extensions":["gdl"]},"model/vnd.gs-gdl":{"source":"apache"},"model/vnd.gs.gdl":{"source":"iana"},"model/vnd.gtw":{"source":"iana","extensions":["gtw"]},"model/vnd.moml+xml":{"source":"iana","compressible":true},"model/vnd.mts":{"source":"iana","extensions":["mts"]},"model/vnd.opengex":{"source":"iana","extensions":["ogex"]},"model/vnd.parasolid.transmit.binary":{"source":"iana","extensions":["x_b"]},"model/vnd.parasolid.transmit.text":{"source":"iana","extensions":["x_t"]},"model/vnd.rosette.annotated-data-model":{"source":"iana"},"model/vnd.usdz+zip":{"source":"iana","compressible":false,"extensions":["usdz"]},"model/vnd.valve.source.compiled-map":{"source":"iana","extensions":["bsp"]},"model/vnd.vtu":{"source":"iana","extensions":["vtu"]},"model/vrml":{"source":"iana","compressible":false,"extensions":["wrl","vrml"]},"model/x3d+binary":{"source":"apache","compressible":false,"extensions":["x3db","x3dbz"]},"model/x3d+fastinfoset":{"source":"iana","extensions":["
x3db"]},"model/x3d+vrml":{"source":"apache","compressible":false,"extensions":["x3dv","x3dvz"]},"model/x3d+xml":{"source":"iana","compressible":true,"extensions":["x3d","x3dz"]},"model/x3d-vrml":{"source":"iana","extensions":["x3dv"]},"multipart/alternative":{"source":"iana","compressible":false},"multipart/appledouble":{"source":"iana"},"multipart/byteranges":{"source":"iana"},"multipart/digest":{"source":"iana"},"multipart/encrypted":{"source":"iana","compressible":false},"multipart/form-data":{"source":"iana","compressible":false},"multipart/header-set":{"source":"iana"},"multipart/mixed":{"source":"iana"},"multipart/multilingual":{"source":"iana"},"multipart/parallel":{"source":"iana"},"multipart/related":{"source":"iana","compressible":false},"multipart/report":{"source":"iana"},"multipart/signed":{"source":"iana","compressible":false},"multipart/vnd.bint.med-plus":{"source":"iana"},"multipart/voice-message":{"source":"iana"},"multipart/x-mixed-replace":{"source":"iana"},"text/1d-interleaved-parityfec":{"source":"iana"},"text/cache-manifest":{"source":"iana","compressible":true,"extensions":["appcache","manifest"]},"text/calendar":{"source":"iana","extensions":["ics","ifb"]},"text/calender":{"compressible":true},"text/cmd":{"compressible":true},"text/coffeescript":{"extensions":["coffee","litcoffee"]},"text/css":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["css"]},"text/csv":{"source":"iana","compressible":true,"extensions":["csv"]},"text/csv-schema":{"source":"iana"},"text/directory":{"source":"iana"},"text/dns":{"source":"iana"},"text/ecmascript":{"source":"iana"},"text/encaprtp":{"source":"iana"},"text/enriched":{"source":"iana"},"text/flexfec":{"source":"iana"},"text/fwdred":{"source":"iana"},"text/grammar-ref-list":{"source":"iana"},"text/html":{"source":"iana","compressible":true,"extensions":["html","htm","shtml"]},"text/jade":{"extensions":["jade"]},"text/javascript":{"source":"iana","compressible":true},"text/jcr-cnd":{"source":"iana"},"text/jsx":{"compressible":true,"extensions":["jsx"]},"text/less":{"compressible":true,"extensions":["less"]},"text/markdown":{"source":"iana","compressible":true,"extensions":["markdown","md"]},"text/mathml":{"source":"nginx","extensions":["mml"]},"text/mdx":{"compressible":true,"extensions":["mdx"]},"text/mizar":{"source":"iana"},"text/n3":{"source":"iana","compressible":true,"extensions":["n3"]},"text/parameters":{"source":"iana"},"text/parityfec":{"source":"iana"},"text/plain":{"source":"iana","compressible":true,"extensions":["txt","text","conf","def","list","log","in","ini"]},"text/provenance-notation":{"source":"iana"},"text/prs.fallenstein.rst":{"source":"iana"},"text/prs.lines.tag":{"source":"iana","extensions":["dsc"]},"text/prs.prop.logic":{"source":"iana"},"text/raptorfec":{"source":"iana"},"text/red":{"source":"iana"},"text/rfc822-headers":{"source":"iana"},"text/richtext":{"source":"iana","compressible":true,"extensions":["rtx"]},"text/rtf":{"source":"iana","compressible":true,"extensions":["rtf"]},"text/rtp-enc-aescm128":{"source":"iana"},"text/rtploopback":{"source":"iana"},"text/rtx":{"source":"iana"},"text/sgml":{"source":"iana","extensions":["sgml","sgm"]},"text/shex":{"extensions":["shex"]},"text/slim":{"extensions":["slim","slm"]},"text/strings":{"source":"iana"},"text/stylus":{"extensions":["stylus","styl"]},"text/t140":{"source":"iana"},"text/tab-separated-values":{"source":"iana","compressible":true,"extensions":["tsv"]},"text/troff":{"source":"iana","extensions":["t","tr","roff","man","me","ms"]},
"text/turtle":{"source":"iana","charset":"UTF-8","extensions":["ttl"]},"text/ulpfec":{"source":"iana"},"text/uri-list":{"source":"iana","compressible":true,"extensions":["uri","uris","urls"]},"text/vcard":{"source":"iana","compressible":true,"extensions":["vcard"]},"text/vnd.a":{"source":"iana"},"text/vnd.abc":{"source":"iana"},"text/vnd.ascii-art":{"source":"iana"},"text/vnd.curl":{"source":"iana","extensions":["curl"]},"text/vnd.curl.dcurl":{"source":"apache","extensions":["dcurl"]},"text/vnd.curl.mcurl":{"source":"apache","extensions":["mcurl"]},"text/vnd.curl.scurl":{"source":"apache","extensions":["scurl"]},"text/vnd.debian.copyright":{"source":"iana"},"text/vnd.dmclientscript":{"source":"iana"},"text/vnd.dvb.subtitle":{"source":"iana","extensions":["sub"]},"text/vnd.esmertec.theme-descriptor":{"source":"iana"},"text/vnd.ficlab.flt":{"source":"iana"},"text/vnd.fly":{"source":"iana","extensions":["fly"]},"text/vnd.fmi.flexstor":{"source":"iana","extensions":["flx"]},"text/vnd.gml":{"source":"iana"},"text/vnd.graphviz":{"source":"iana","extensions":["gv"]},"text/vnd.hgl":{"source":"iana"},"text/vnd.in3d.3dml":{"source":"iana","extensions":["3dml"]},"text/vnd.in3d.spot":{"source":"iana","extensions":["spot"]},"text/vnd.iptc.newsml":{"source":"iana"},"text/vnd.iptc.nitf":{"source":"iana"},"text/vnd.latex-z":{"source":"iana"},"text/vnd.motorola.reflex":{"source":"iana"},"text/vnd.ms-mediapackage":{"source":"iana"},"text/vnd.net2phone.commcenter.command":{"source":"iana"},"text/vnd.radisys.msml-basic-layout":{"source":"iana"},"text/vnd.senx.warpscript":{"source":"iana"},"text/vnd.si.uricatalogue":{"source":"iana"},"text/vnd.sosi":{"source":"iana"},"text/vnd.sun.j2me.app-descriptor":{"source":"iana","extensions":["jad"]},"text/vnd.trolltech.linguist":{"source":"iana"},"text/vnd.wap.si":{"source":"iana"},"text/vnd.wap.sl":{"source":"iana"},"text/vnd.wap.wml":{"source":"iana","extensions":["wml"]},"text/vnd.wap.wmlscript":{"source":"iana","extensions":["wmls"]},"text/vtt":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["vtt"]},"text/x-asm":{"source":"apache","extensions":["s","asm"]},"text/x-c":{"source":"apache","extensions":["c","cc","cxx","cpp","h","hh","dic"]},"text/x-component":{"source":"nginx","extensions":["htc"]},"text/x-fortran":{"source":"apache","extensions":["f","for","f77","f90"]},"text/x-gwt-rpc":{"compressible":true},"text/x-handlebars-template":{"extensions":["hbs"]},"text/x-java-source":{"source":"apache","extensions":["java"]},"text/x-jquery-tmpl":{"compressible":true},"text/x-lua":{"extensions":["lua"]},"text/x-markdown":{"compressible":true,"extensions":["mkd"]},"text/x-nfo":{"source":"apache","extensions":["nfo"]},"text/x-opml":{"source":"apache","extensions":["opml"]},"text/x-org":{"compressible":true,"extensions":["org"]},"text/x-pascal":{"source":"apache","extensions":["p","pas"]},"text/x-processing":{"compressible":true,"extensions":["pde"]},"text/x-sass":{"extensions":["sass"]},"text/x-scss":{"extensions":["scss"]},"text/x-setext":{"source":"apache","extensions":["etx"]},"text/x-sfv":{"source":"apache","extensions":["sfv"]},"text/x-suse-ymp":{"compressible":true,"extensions":["ymp"]},"text/x-uuencode":{"source":"apache","extensions":["uu"]},"text/x-vcalendar":{"source":"apache","extensions":["vcs"]},"text/x-vcard":{"source":"apache","extensions":["vcf"]},"text/xml":{"source":"iana","compressible":true,"extensions":["xml"]},"text/xml-external-parsed-entity":{"source":"iana"},"text/yaml":{"extensions":["yaml","yml"]},"video/1d-interleaved-parityfec"
:{"source":"iana"},"video/3gpp":{"source":"iana","extensions":["3gp","3gpp"]},"video/3gpp-tt":{"source":"iana"},"video/3gpp2":{"source":"iana","extensions":["3g2"]},"video/bmpeg":{"source":"iana"},"video/bt656":{"source":"iana"},"video/celb":{"source":"iana"},"video/dv":{"source":"iana"},"video/encaprtp":{"source":"iana"},"video/flexfec":{"source":"iana"},"video/h261":{"source":"iana","extensions":["h261"]},"video/h263":{"source":"iana","extensions":["h263"]},"video/h263-1998":{"source":"iana"},"video/h263-2000":{"source":"iana"},"video/h264":{"source":"iana","extensions":["h264"]},"video/h264-rcdo":{"source":"iana"},"video/h264-svc":{"source":"iana"},"video/h265":{"source":"iana"},"video/iso.segment":{"source":"iana"},"video/jpeg":{"source":"iana","extensions":["jpgv"]},"video/jpeg2000":{"source":"iana"},"video/jpm":{"source":"apache","extensions":["jpm","jpgm"]},"video/mj2":{"source":"iana","extensions":["mj2","mjp2"]},"video/mp1s":{"source":"iana"},"video/mp2p":{"source":"iana"},"video/mp2t":{"source":"iana","extensions":["ts"]},"video/mp4":{"source":"iana","compressible":false,"extensions":["mp4","mp4v","mpg4"]},"video/mp4v-es":{"source":"iana"},"video/mpeg":{"source":"iana","compressible":false,"extensions":["mpeg","mpg","mpe","m1v","m2v"]},"video/mpeg4-generic":{"source":"iana"},"video/mpv":{"source":"iana"},"video/nv":{"source":"iana"},"video/ogg":{"source":"iana","compressible":false,"extensions":["ogv"]},"video/parityfec":{"source":"iana"},"video/pointer":{"source":"iana"},"video/quicktime":{"source":"iana","compressible":false,"extensions":["qt","mov"]},"video/raptorfec":{"source":"iana"},"video/raw":{"source":"iana"},"video/rtp-enc-aescm128":{"source":"iana"},"video/rtploopback":{"source":"iana"},"video/rtx":{"source":"iana"},"video/smpte291":{"source":"iana"},"video/smpte292m":{"source":"iana"},"video/ulpfec":{"source":"iana"},"video/vc1":{"source":"iana"},"video/vc2":{"source":"iana"},"video/vnd.cctv":{"source":"iana"},"video/vnd.dece.hd":{"source":"iana","extensions":["uvh","uvvh"]},"video/vnd.dece.mobile":{"source":"iana","extensions":["uvm","uvvm"]},"video/vnd.dece.mp4":{"source":"iana"},"video/vnd.dece.pd":{"source":"iana","extensions":["uvp","uvvp"]},"video/vnd.dece.sd":{"source":"iana","extensions":["uvs","uvvs"]},"video/vnd.dece.video":{"source":"iana","extensions":["uvv","uvvv"]},"video/vnd.directv.mpeg":{"source":"iana"},"video/vnd.directv.mpeg-tts":{"source":"iana"},"video/vnd.dlna.mpeg-tts":{"source":"iana"},"video/vnd.dvb.file":{"source":"iana","extensions":["dvb"]},"video/vnd.fvt":{"source":"iana","extensions":["fvt"]},"video/vnd.hns.video":{"source":"iana"},"video/vnd.iptvforum.1dparityfec-1010":{"source":"iana"},"video/vnd.iptvforum.1dparityfec-2005":{"source":"iana"},"video/vnd.iptvforum.2dparityfec-1010":{"source":"iana"},"video/vnd.iptvforum.2dparityfec-2005":{"source":"iana"},"video/vnd.iptvforum.ttsavc":{"source":"iana"},"video/vnd.iptvforum.ttsmpeg2":{"source":"iana"},"video/vnd.motorola.video":{"source":"iana"},"video/vnd.motorola.videop":{"source":"iana"},"video/vnd.mpegurl":{"source":"iana","extensions":["mxu","m4u"]},"video/vnd.ms-playready.media.pyv":{"source":"iana","extensions":["pyv"]},"video/vnd.nokia.interleaved-multimedia":{"source":"iana"},"video/vnd.nokia.mp4vr":{"source":"iana"},"video/vnd.nokia.videovoip":{"source":"iana"},"video/vnd.objectvideo":{"source":"iana"},"video/vnd.radgamettools.bink":{"source":"iana"},"video/vnd.radgamettools.smacker":{"source":"iana"},"video/vnd.sealed.mpeg1":{"source":"iana"},"video/vnd.sealed.mpeg4":{"source":"i
ana"},"video/vnd.sealed.swf":{"source":"iana"},"video/vnd.sealedmedia.softseal.mov":{"source":"iana"},"video/vnd.uvvu.mp4":{"source":"iana","extensions":["uvu","uvvu"]},"video/vnd.vivo":{"source":"iana","extensions":["viv"]},"video/vnd.youtube.yt":{"source":"iana"},"video/vp8":{"source":"iana"},"video/webm":{"source":"apache","compressible":false,"extensions":["webm"]},"video/x-f4v":{"source":"apache","extensions":["f4v"]},"video/x-fli":{"source":"apache","extensions":["fli"]},"video/x-flv":{"source":"apache","compressible":false,"extensions":["flv"]},"video/x-m4v":{"source":"apache","extensions":["m4v"]},"video/x-matroska":{"source":"apache","compressible":false,"extensions":["mkv","mk3d","mks"]},"video/x-mng":{"source":"apache","extensions":["mng"]},"video/x-ms-asf":{"source":"apache","extensions":["asf","asx"]},"video/x-ms-vob":{"source":"apache","extensions":["vob"]},"video/x-ms-wm":{"source":"apache","extensions":["wm"]},"video/x-ms-wmv":{"source":"apache","compressible":false,"extensions":["wmv"]},"video/x-ms-wmx":{"source":"apache","extensions":["wmx"]},"video/x-ms-wvx":{"source":"apache","extensions":["wvx"]},"video/x-msvideo":{"source":"apache","extensions":["avi"]},"video/x-sgi-movie":{"source":"apache","extensions":["movie"]},"video/x-smv":{"source":"apache","extensions":["smv"]},"x-conference/x-cooltalk":{"source":"apache","extensions":["ice"]},"x-shader/x-fragment":{"compressible":true},"x-shader/x-vertex":{"compressible":true}}; + +/***/ }), + +/***/ 547: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var util = __webpack_require__(669); +var Stream = __webpack_require__(413).Stream; +var DelayedStream = __webpack_require__(152); + +module.exports = CombinedStream; +function CombinedStream() { + this.writable = false; + this.readable = true; + this.dataSize = 0; + this.maxDataSize = 2 * 1024 * 1024; + this.pauseStreams = true; + + this._released = false; + this._streams = []; + this._currentStream = null; + this._insideLoop = false; + this._pendingNext = false; +} +util.inherits(CombinedStream, Stream); + +CombinedStream.create = function(options) { + var combinedStream = new this(); + + options = options || {}; + for (var option in options) { + combinedStream[option] = options[option]; + } + + return combinedStream; +}; + +CombinedStream.isStreamLike = function(stream) { + return (typeof stream !== 'function') + && (typeof stream !== 'string') + && (typeof stream !== 'boolean') + && (typeof stream !== 'number') + && (!Buffer.isBuffer(stream)); +}; + +CombinedStream.prototype.append = function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + + if (isStreamLike) { + if (!(stream instanceof DelayedStream)) { + var newStream = DelayedStream.create(stream, { + maxDataSize: Infinity, + pauseStream: this.pauseStreams, + }); + stream.on('data', this._checkDataSize.bind(this)); + stream = newStream; + } + + this._handleErrors(stream); + + if (this.pauseStreams) { + stream.pause(); + } + } + + this._streams.push(stream); + return this; +}; + +CombinedStream.prototype.pipe = function(dest, options) { + Stream.prototype.pipe.call(this, dest, options); + this.resume(); + return dest; +}; + +CombinedStream.prototype._getNext = function() { + this._currentStream = null; + + if (this._insideLoop) { + this._pendingNext = true; + return; // defer call + } + + this._insideLoop = true; + try { + do { + this._pendingNext = false; + this._realGetNext(); + } while (this._pendingNext); + } finally { + this._insideLoop = false; + } +}; + 
+CombinedStream.prototype._realGetNext = function() {
+  var stream = this._streams.shift();
+
+
+  if (typeof stream == 'undefined') {
+    this.end();
+    return;
+  }
+
+  if (typeof stream !== 'function') {
+    this._pipeNext(stream);
+    return;
+  }
+
+  var getStream = stream;
+  getStream(function(stream) {
+    var isStreamLike = CombinedStream.isStreamLike(stream);
+    if (isStreamLike) {
+      stream.on('data', this._checkDataSize.bind(this));
+      this._handleErrors(stream);
+    }
+
+    this._pipeNext(stream);
+  }.bind(this));
+};
+
+CombinedStream.prototype._pipeNext = function(stream) {
+  this._currentStream = stream;
+
+  var isStreamLike = CombinedStream.isStreamLike(stream);
+  if (isStreamLike) {
+    stream.on('end', this._getNext.bind(this));
+    stream.pipe(this, {end: false});
+    return;
+  }
+
+  var value = stream;
+  this.write(value);
+  this._getNext();
+};
+
+CombinedStream.prototype._handleErrors = function(stream) {
+  var self = this;
+  stream.on('error', function(err) {
+    self._emitError(err);
+  });
+};
+
+CombinedStream.prototype.write = function(data) {
+  this.emit('data', data);
+};
+
+CombinedStream.prototype.pause = function() {
+  if (!this.pauseStreams) {
+    return;
+  }
+
+  if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause();
+  this.emit('pause');
+};
+
+CombinedStream.prototype.resume = function() {
+  if (!this._released) {
+    this._released = true;
+    this.writable = true;
+    this._getNext();
+  }
+
+  if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume();
+  this.emit('resume');
+};
+
+CombinedStream.prototype.end = function() {
+  this._reset();
+  this.emit('end');
+};
+
+CombinedStream.prototype.destroy = function() {
+  this._reset();
+  this.emit('close');
+};
+
+CombinedStream.prototype._reset = function() {
+  this.writable = false;
+  this._streams = [];
+  this._currentStream = null;
+};
+
+CombinedStream.prototype._checkDataSize = function() {
+  this._updateDataSize();
+  if (this.dataSize <= this.maxDataSize) {
+    return;
+  }
+
+  var message =
+    'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';
+  this._emitError(new Error(message));
+};
+
+CombinedStream.prototype._updateDataSize = function() {
+  this.dataSize = 0;
+
+  var self = this;
+  this._streams.forEach(function(stream) {
+    if (!stream.dataSize) {
+      return;
+    }
+
+    self.dataSize += stream.dataSize;
+  });
+
+  if (this._currentStream && this._currentStream.dataSize) {
+    this.dataSize += this._currentStream.dataSize;
+  }
+};
+
+CombinedStream.prototype._emitError = function(err) {
+  this._reset();
+  this.emit('error', err);
+};
+
+
+/***/ }),
+
+/***/ 563:
+/***/ (function(module) {
+
+"use strict";
+
+
+const codes = {};
+
+function createErrorType(code, message, Base) {
+  if (!Base) {
+    Base = Error
+  }
+
+  function getMessage (arg1, arg2, arg3) {
+    if (typeof message === 'string') {
+      return message
+    } else {
+      return message(arg1, arg2, arg3)
+    }
+  }
+
+  class NodeError extends Base {
+    constructor (arg1, arg2, arg3) {
+      super(getMessage(arg1, arg2, arg3));
+    }
+  }
+
+  NodeError.prototype.name = Base.name;
+  NodeError.prototype.code = code;
+
+  codes[code] = NodeError;
+}
+
+// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
+function oneOf(expected, thing) {
+  if (Array.isArray(expected)) {
+    const len = expected.length;
+    expected = expected.map((i) => String(i));
+    if (len > 2) {
+      return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
+             expected[len - 1];
+    } else if (len === 2) {
+      return `one of ${thing} ${expected[0]} or ${expected[1]}`;
+    } else {
+      return `of ${thing} ${expected[0]}`;
+    }
+  } else {
+    return `of ${thing} ${String(expected)}`;
+  }
+}
+
+// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
+function startsWith(str, search, pos) {
+  return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
+}
+
+// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
+function endsWith(str, search, this_len) {
+  if (this_len === undefined || this_len > str.length) {
+    this_len = str.length;
+  }
+  return str.substring(this_len - search.length, this_len) === search;
+}
+
+// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
+function includes(str, search, start) {
+  if (typeof start !== 'number') {
+    start = 0;
+  }
+
+  if (start + search.length > str.length) {
+    return false;
+  } else {
+    return str.indexOf(search, start) !== -1;
+  }
+}
+
+createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
+  return 'The value "' + value + '" is invalid for option "' + name + '"'
+}, TypeError);
+createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
+  // determiner: 'must be' or 'must not be'
+  let determiner;
+  if (typeof expected === 'string' && startsWith(expected, 'not ')) {
+    determiner = 'must not be';
+    expected = expected.replace(/^not /, '');
+  } else {
+    determiner = 'must be';
+  }
+
+  let msg;
+  if (endsWith(name, ' argument')) {
+    // For cases like 'first argument'
+    msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
+  } else {
+    const type = includes(name, '.') ? 'property' : 'argument';
+    msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
+  }
+
+  msg += `. Received type ${typeof actual}`;
+  return msg;
+}, TypeError);
+createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
+createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
+  return 'The ' + name + ' method is not implemented'
+});
+createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
+createErrorType('ERR_STREAM_DESTROYED', function (name) {
+  return 'Cannot call ' + name + ' after a stream was destroyed';
+});
+createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
+createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
+createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
+createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
+createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
+  return 'Unknown encoding: ' + arg
+}, TypeError);
+createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
+
+module.exports.codes = codes;
+
+
+/***/ }),
+
+/***/ 566:
+/***/ (function(module) {
+
+// API
+module.exports = abort;
+
+/**
+ * Aborts leftover active jobs
+ *
+ * @param {object} state - current state object
+ */
+function abort(state)
+{
+  Object.keys(state.jobs).forEach(clean.bind(state));
+
+  // reset leftover jobs
+  state.jobs = {};
+}
+
+/**
+ * Cleans up leftover job by invoking abort function for the provided job id
+ *
+ * @this state
+ * @param {string|number} key - job id to abort
+ */
+function clean(key)
+{
+  if (typeof this.jobs[key] == 'function')
+  {
+    this.jobs[key]();
+  }
+}
+
+
+/***/ }),
+
+/***/ 574:
+/***/ (function(module, exports, __webpack_require__) {
+
+var Stream = __webpack_require__(413);
+if (process.env.READABLE_STREAM === 'disable' && Stream) {
+  module.exports = Stream.Readable;
+  Object.assign(module.exports, Stream);
+  module.exports.Stream = Stream;
+} else {
+  exports = module.exports = __webpack_require__(226);
+  exports.Stream = Stream || exports;
+  exports.Readable = exports;
+  exports.Writable = __webpack_require__(241);
+  exports.Duplex = __webpack_require__(831);
+  exports.Transform = __webpack_require__(925);
+  exports.PassThrough = __webpack_require__(882);
+  exports.finished = __webpack_require__(287);
+  exports.pipeline = __webpack_require__(238);
+}
+
+
+/***/ }),
+
+/***/ 581:
+/***/ (function(module) {
+
+"use strict";
+
+
+var has = Object.prototype.hasOwnProperty;
+var isArray = Array.isArray;
+
+var hexTable = (function () {
+    var array = [];
+    for (var i = 0; i < 256; ++i) {
+        array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase());
+    }
+
+    return array;
+}());
+
+var compactQueue = function compactQueue(queue) {
+    while (queue.length > 1) {
+        var item = queue.pop();
+        var obj = item.obj[item.prop];
+
+        if (isArray(obj)) {
+            var compacted = [];
+
+            for (var j = 0; j < obj.length; ++j) {
+                if (typeof obj[j] !== 'undefined') {
+                    compacted.push(obj[j]);
+                }
+            }
+
+            item.obj[item.prop] = compacted;
+        }
+    }
+};
+
+var arrayToObject = function arrayToObject(source, options) {
+    var obj = options && options.plainObjects ? Object.create(null) : {};
+    for (var i = 0; i < source.length; ++i) {
+        if (typeof source[i] !== 'undefined') {
+            obj[i] = source[i];
+        }
+    }
+
+    return obj;
+};
+
+var merge = function merge(target, source, options) {
+    /* eslint no-param-reassign: 0 */
+    if (!source) {
+        return target;
+    }
+
+    if (typeof source !== 'object') {
+        if (isArray(target)) {
+            target.push(source);
+        } else if (target && typeof target === 'object') {
+            if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) {
+                target[source] = true;
+            }
+        } else {
+            return [target, source];
+        }
+
+        return target;
+    }
+
+    if (!target || typeof target !== 'object') {
+        return [target].concat(source);
+    }
+
+    var mergeTarget = target;
+    if (isArray(target) && !isArray(source)) {
+        mergeTarget = arrayToObject(target, options);
+    }
+
+    if (isArray(target) && isArray(source)) {
+        source.forEach(function (item, i) {
+            if (has.call(target, i)) {
+                var targetItem = target[i];
+                if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') {
+                    target[i] = merge(targetItem, item, options);
+                } else {
+                    target.push(item);
+                }
+            } else {
+                target[i] = item;
+            }
+        });
+        return target;
+    }
+
+    return Object.keys(source).reduce(function (acc, key) {
+        var value = source[key];
+
+        if (has.call(acc, key)) {
+            acc[key] = merge(acc[key], value, options);
+        } else {
+            acc[key] = value;
+        }
+        return acc;
+    }, mergeTarget);
+};
+
+var assign = function assignSingleSource(target, source) {
+    return Object.keys(source).reduce(function (acc, key) {
+        acc[key] = source[key];
+        return acc;
+    }, target);
+};
+
+var decode = function (str, decoder, charset) {
+    var strWithoutPlus = str.replace(/\+/g, ' ');
+    if (charset === 'iso-8859-1') {
+        // unescape never throws, no try...catch needed:
+        return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape);
+    }
+    // utf-8
+    try {
+        return decodeURIComponent(strWithoutPlus);
+    } catch (e) {
+        return strWithoutPlus;
+    }
+};
+
+var encode = function encode(str, defaultEncoder, charset) {
+    // This code was originally written by Brian White (mscdex) for the io.js core querystring library.
+    // It has been adapted here for stricter adherence to RFC 3986
+    if (str.length === 0) {
+        return str;
+    }
+
+    var string = str;
+    if (typeof str === 'symbol') {
+        string = Symbol.prototype.toString.call(str);
+    } else if (typeof str !== 'string') {
+        string = String(str);
+    }
+
+    if (charset === 'iso-8859-1') {
+        return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) {
+            return '%26%23' + parseInt($0.slice(2), 16) + '%3B';
+        });
+    }
+
+    var out = '';
+    for (var i = 0; i < string.length; ++i) {
+        var c = string.charCodeAt(i);
+
+        if (
+            c === 0x2D // -
+            || c === 0x2E // .
+ || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + merge: merge +}; + + +/***/ }), + +/***/ 589: +/***/ (function(module) { + +"use strict"; + + +const BYTE_UNITS = [ + 'B', + 'kB', + 'MB', + 'GB', + 'TB', + 'PB', + 'EB', + 'ZB', + 'YB' +]; + +const BIT_UNITS = [ + 'b', + 'kbit', + 'Mbit', + 'Gbit', + 'Tbit', + 'Pbit', + 'Ebit', + 'Zbit', + 'Ybit' +]; + +/* +Formats the given number using `Number#toLocaleString`. +- If locale is a string, the value is expected to be a locale-key (for example: `de`). +- If locale is true, the system default locale is used for translation. +- If no value for locale is specified, the number is returned unmodified. +*/ +const toLocaleString = (number, locale) => { + let result = number; + if (typeof locale === 'string') { + result = number.toLocaleString(locale); + } else if (locale === true) { + result = number.toLocaleString(); + } + + return result; +}; + +module.exports = (number, options) => { + if (!Number.isFinite(number)) { + throw new TypeError(`Expected a finite number, got ${typeof number}: ${number}`); + } + + options = Object.assign({bits: false}, options); + const UNITS = options.bits ? BIT_UNITS : BYTE_UNITS; + + if (options.signed && number === 0) { + return ' 0 ' + UNITS[0]; + } + + const isNegative = number < 0; + const prefix = isNegative ? '-' : (options.signed ? 
'+' : ''); + + if (isNegative) { + number = -number; + } + + if (number < 1) { + const numberString = toLocaleString(number, options.locale); + return prefix + numberString + ' ' + UNITS[0]; + } + + const exponent = Math.min(Math.floor(Math.log10(number) / 3), UNITS.length - 1); + // eslint-disable-next-line unicorn/prefer-exponentiation-operator + number = Number((number / Math.pow(1000, exponent)).toPrecision(3)); + const numberString = toLocaleString(number, options.locale); + + const unit = UNITS[exponent]; + + return prefix + numberString + ' ' + unit; +}; + + +/***/ }), + +/***/ 605: +/***/ (function(module) { + +module.exports = require("http"); + +/***/ }), + +/***/ 614: +/***/ (function(module) { + +module.exports = require("events"); + +/***/ }), + +/***/ 622: +/***/ (function(module) { + +module.exports = require("path"); + +/***/ }), + +/***/ 669: +/***/ (function(module) { + +module.exports = require("util"); + +/***/ }), + +/***/ 674: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + + +/**/ + +var Buffer = __webpack_require__(149).Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. 
+function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} + +/***/ }), + +/***/ 689: +/***/ (function(module, __unusedexports, __webpack_require__) { + +try { + var util = __webpack_require__(669); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __webpack_require__(315); +} + + +/***/ }), + +/***/ 722: +/***/ (function(module, __unusedexports, __webpack_require__) { + +const fs = __webpack_require__(747) +const path = __webpack_require__(622) + +const { readdir, lstat } = fs.promises + +/** + * pathFilter lets you filter files based on a resolved `filepath`. + * @callback pathFilter + * @param {String} filepath - The resolved `filepath` of the file to test for filtering. + * + * @return {Boolean} Return false to filter the given `filepath` and true to include it. + */ +const pathFilter = filepath => true + +/** + * statFilter lets you filter files based on a lstat object. + * @callback statFilter + * @param {Object} st - A fs.Stats instance. + * + * @return {Boolean} Return false to filter the given `filepath` and true to include it. + */ +const statFilter = filepath => true + +/** + * FWStats is the object that the okdistribute/folder-walker module returns by default. + * + * @typedef FWStats + * @property {String} root - The filepath of the directory where the walk started. + * @property {String} filepath - The resolved filepath. + * @property {Object} stat - A fs.Stats instance. + * @property {String} relname - The relative path to `root`. + * @property {String} basename - The resolved filepath of the files containing directory. + */ + +/** + * shaper lets you change the shape of the returned file data from walk-time stats. + * @callback shaper + * @param {FWStats} fwStats - The same status object returned from folder-walker. 
+ * + * @return {*} - Whatever you want returned from the directory walk. + */ +const shaper = ({ root, filepath, stat, relname, basename }) => filepath + +/** + * Options object + * + * @typedef Opts + * @property {pathFilter} [pathFilter] - A pathFilter cb. + * @property {statFilter} [statFilter] - A statFilter cb. + * @property {Number} [maxDepth=Infinity] - The maximum number of folders to walk down into. + * @property {shaper} [shaper] - A shaper cb. + */ + +/** + * Create an async generator that iterates over all folders and directories inside of `dirs`. + * + * @async + * @generator + * @function + * @public + * @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths. + * @param {?(Opts)} opts - Options used for the directory walk. + * + * @yields {Promise} - An async iterator that returns anything. + */ +async function * asyncFolderWalker (dirs, opts) { + opts = Object.assign({ + fs, + pathFilter, + statFilter, + maxDepth: Infinity, + shaper + }, opts) + + const roots = [dirs].flat().filter(opts.pathFilter) + const pending = [] + + while (roots.length) { + const root = roots.shift() + pending.push(root) + + while (pending.length) { + const current = pending.shift() + if (typeof current === 'undefined') continue + const st = await lstat(current) + if ((!st.isDirectory() || depthLimiter(current, root, opts.maxDepth)) && opts.statFilter(st)) { + yield opts.shaper(fwShape(root, current, st)) + continue + } + + const files = await readdir(current) + files.sort() + + for (const file of files) { + var next = path.join(current, file) + if (opts.pathFilter(next)) pending.unshift(next) + } + if (current === root || !opts.statFilter(st)) continue + else yield opts.shaper(fwShape(root, current, st)) + } + } +} + +/** + * Generates the same shape as the folder-walker module. + * + * @function + * @private + * @param {String} root - Root filepath. + * @param {String} name - Target filepath. + * @param {Object} st - fs.Stat object. + * + * @return {FWStats} - Folder walker object. + */ +function fwShape (root, name, st) { + return { + root: root, + filepath: name, + stat: st, + relname: root === name ? path.basename(name) : path.relative(root, name), + basename: path.basename(name) + } +} + +/** + * Test if we are at maximum directory depth. + * + * @private + * @function + * @param {String} filePath - The resolved path of the target fille. + * @param {String} relativeTo - The root directory of the current walk. + * @param {Number} maxDepth - The maximum number of folders to descend into. + * + * @returns {Boolean} - Return true to signal stop descending. + */ +function depthLimiter (filePath, relativeTo, maxDepth) { + if (maxDepth === Infinity) return false + const rootDepth = relativeTo.split(path.sep).length + const fileDepth = filePath.split(path.sep).length + return fileDepth - rootDepth > maxDepth +} + +/** + * Async iterable collector + * + * @async + * @function + * @private + */ +async function all (iterator) { + const collect = [] + + for await (const result of iterator) { + collect.push(result) + } + + return collect +} + +/** + * allFiles gives you all files from the directory walk as an array. + * + * @async + * @function + * @public + * @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths. + * @param {?(Opts)} opts - Options used for the directory walk. + * + * @returns {Promise} - An async iterator that returns anything. 
+ */ +async function allFiles (...args) { + return all(asyncFolderWalker(...args)) +} + +module.exports = { + asyncFolderWalker, + allFiles, + all +} + + +/***/ }), + +/***/ 740: +/***/ (function(module) { + +module.exports = class FixedFIFO { + constructor (hwm) { + if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) throw new Error('Max size for a FixedFIFO should be a power of two') + this.buffer = new Array(hwm) + this.mask = hwm - 1 + this.top = 0 + this.btm = 0 + this.next = null + } + + push (data) { + if (this.buffer[this.top] !== undefined) return false + this.buffer[this.top] = data + this.top = (this.top + 1) & this.mask + return true + } + + shift () { + const last = this.buffer[this.btm] + if (last === undefined) return undefined + this.buffer[this.btm] = undefined + this.btm = (this.btm + 1) & this.mask + return last + } + + isEmpty () { + return this.buffer[this.btm] === undefined + } +} + + +/***/ }), + +/***/ 745: +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { EventEmitter } = __webpack_require__(614) +const STREAM_DESTROYED = new Error('Stream was destroyed') + +const FIFO = __webpack_require__(134) + +/* eslint-disable no-multi-spaces */ + +const MAX = ((1 << 25) - 1) + +// Shared state +const OPENING = 0b001 +const DESTROYING = 0b010 +const DESTROYED = 0b100 + +const NOT_OPENING = MAX ^ OPENING + +// Read state +const READ_ACTIVE = 0b0000000000001 << 3 +const READ_PRIMARY = 0b0000000000010 << 3 +const READ_SYNC = 0b0000000000100 << 3 +const READ_QUEUED = 0b0000000001000 << 3 +const READ_RESUMED = 0b0000000010000 << 3 +const READ_PIPE_DRAINED = 0b0000000100000 << 3 +const READ_ENDING = 0b0000001000000 << 3 +const READ_EMIT_DATA = 0b0000010000000 << 3 +const READ_EMIT_READABLE = 0b0000100000000 << 3 +const READ_EMITTED_READABLE = 0b0001000000000 << 3 +const READ_DONE = 0b0010000000000 << 3 +const READ_NEXT_TICK = 0b0100000000001 << 3 // also active +const READ_NEEDS_PUSH = 0b1000000000000 << 3 + +const READ_NOT_ACTIVE = MAX ^ READ_ACTIVE +const READ_NON_PRIMARY = MAX ^ READ_PRIMARY +const READ_NON_PRIMARY_AND_PUSHED = MAX ^ (READ_PRIMARY | READ_NEEDS_PUSH) +const READ_NOT_SYNC = MAX ^ READ_SYNC +const READ_PUSHED = MAX ^ READ_NEEDS_PUSH +const READ_PAUSED = MAX ^ READ_RESUMED +const READ_NOT_QUEUED = MAX ^ (READ_QUEUED | READ_EMITTED_READABLE) +const READ_NOT_ENDING = MAX ^ READ_ENDING +const READ_PIPE_NOT_DRAINED = MAX ^ (READ_RESUMED | READ_PIPE_DRAINED) +const READ_NOT_NEXT_TICK = MAX ^ READ_NEXT_TICK + +// Write state +const WRITE_ACTIVE = 0b000000001 << 16 +const WRITE_PRIMARY = 0b000000010 << 16 +const WRITE_SYNC = 0b000000100 << 16 +const WRITE_QUEUED = 0b000001000 << 16 +const WRITE_UNDRAINED = 0b000010000 << 16 +const WRITE_DONE = 0b000100000 << 16 +const WRITE_EMIT_DRAIN = 0b001000000 << 16 +const WRITE_NEXT_TICK = 0b010000001 << 16 // also active +const WRITE_FINISHING = 0b100000000 << 16 + +const WRITE_NOT_ACTIVE = MAX ^ WRITE_ACTIVE +const WRITE_NOT_SYNC = MAX ^ WRITE_SYNC +const WRITE_NON_PRIMARY = MAX ^ WRITE_PRIMARY +const WRITE_NOT_FINISHING = MAX ^ WRITE_FINISHING +const WRITE_DRAINED = MAX ^ WRITE_UNDRAINED +const WRITE_NOT_QUEUED = MAX ^ WRITE_QUEUED +const WRITE_NOT_NEXT_TICK = MAX ^ WRITE_NEXT_TICK + +// Combined shared state +const ACTIVE = READ_ACTIVE | WRITE_ACTIVE +const NOT_ACTIVE = MAX ^ ACTIVE +const DONE = READ_DONE | WRITE_DONE +const DESTROY_STATUS = DESTROYING | DESTROYED +const OPEN_STATUS = DESTROY_STATUS | OPENING +const AUTO_DESTROY = DESTROY_STATUS | DONE +const NON_PRIMARY = WRITE_NON_PRIMARY & READ_NON_PRIMARY + 
+// Combined read state +const READ_PRIMARY_STATUS = OPEN_STATUS | READ_ENDING | READ_DONE +const READ_STATUS = OPEN_STATUS | READ_DONE | READ_QUEUED +const READ_FLOWING = READ_RESUMED | READ_PIPE_DRAINED +const READ_ACTIVE_AND_SYNC = READ_ACTIVE | READ_SYNC +const READ_ACTIVE_AND_SYNC_AND_NEEDS_PUSH = READ_ACTIVE | READ_SYNC | READ_NEEDS_PUSH +const READ_PRIMARY_AND_ACTIVE = READ_PRIMARY | READ_ACTIVE +const READ_ENDING_STATUS = OPEN_STATUS | READ_ENDING | READ_QUEUED +const READ_EMIT_READABLE_AND_QUEUED = READ_EMIT_READABLE | READ_QUEUED +const READ_READABLE_STATUS = OPEN_STATUS | READ_EMIT_READABLE | READ_QUEUED | READ_EMITTED_READABLE +const SHOULD_NOT_READ = OPEN_STATUS | READ_ACTIVE | READ_ENDING | READ_DONE | READ_NEEDS_PUSH +const READ_BACKPRESSURE_STATUS = DESTROY_STATUS | READ_ENDING | READ_DONE + +// Combined write state +const WRITE_PRIMARY_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_DONE +const WRITE_QUEUED_AND_UNDRAINED = WRITE_QUEUED | WRITE_UNDRAINED +const WRITE_QUEUED_AND_ACTIVE = WRITE_QUEUED | WRITE_ACTIVE +const WRITE_DRAIN_STATUS = WRITE_QUEUED | WRITE_UNDRAINED | OPEN_STATUS | WRITE_ACTIVE +const WRITE_STATUS = OPEN_STATUS | WRITE_ACTIVE | WRITE_QUEUED +const WRITE_PRIMARY_AND_ACTIVE = WRITE_PRIMARY | WRITE_ACTIVE +const WRITE_ACTIVE_AND_SYNC = WRITE_ACTIVE | WRITE_SYNC +const WRITE_FINISHING_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_QUEUED +const WRITE_BACKPRESSURE_STATUS = WRITE_UNDRAINED | DESTROY_STATUS | WRITE_FINISHING | WRITE_DONE + +const asyncIterator = Symbol.asyncIterator || Symbol('asyncIterator') + +class WritableState { + constructor (stream, { highWaterMark = 16384, map = null, mapWritable, byteLength, byteLengthWritable } = {}) { + this.stream = stream + this.queue = new FIFO() + this.highWaterMark = highWaterMark + this.buffered = 0 + this.error = null + this.pipeline = null + this.byteLength = byteLengthWritable || byteLength || defaultByteLength + this.map = mapWritable || map + this.afterWrite = afterWrite.bind(this) + } + + push (data) { + if (this.map !== null) data = this.map(data) + + this.buffered += this.byteLength(data) + this.queue.push(data) + + if (this.buffered < this.highWaterMark) { + this.stream._duplexState |= WRITE_QUEUED + return true + } + + this.stream._duplexState |= WRITE_QUEUED_AND_UNDRAINED + return false + } + + shift () { + const data = this.queue.shift() + const stream = this.stream + + this.buffered -= this.byteLength(data) + if (this.buffered === 0) stream._duplexState &= WRITE_NOT_QUEUED + + return data + } + + end (data) { + if (data !== undefined && data !== null) this.push(data) + this.stream._duplexState = (this.stream._duplexState | WRITE_FINISHING) & WRITE_NON_PRIMARY + } + + autoBatch (data, cb) { + const buffer = [] + const stream = this.stream + + buffer.push(data) + while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED_AND_ACTIVE) { + buffer.push(stream._writableState.shift()) + } + + if ((stream._duplexState & OPEN_STATUS) !== 0) return cb(null) + stream._writev(buffer, cb) + } + + update () { + const stream = this.stream + + while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED) { + const data = this.shift() + stream._duplexState |= WRITE_ACTIVE_AND_SYNC + stream._write(data, this.afterWrite) + stream._duplexState &= WRITE_NOT_SYNC + } + + if ((stream._duplexState & WRITE_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary() + } + + updateNonPrimary () { + const stream = this.stream + + if ((stream._duplexState & WRITE_FINISHING_STATUS) === WRITE_FINISHING) { + stream._duplexState = 
(stream._duplexState | WRITE_ACTIVE) & WRITE_NOT_FINISHING + stream._final(afterFinal.bind(this)) + return + } + + if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) { + if ((stream._duplexState & ACTIVE) === 0) { + stream._duplexState |= ACTIVE + stream._destroy(afterDestroy.bind(this)) + } + return + } + + if ((stream._duplexState & OPEN_STATUS) === OPENING) { + stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING + stream._open(afterOpen.bind(this)) + } + } + + updateNextTick () { + if ((this.stream._duplexState & WRITE_NEXT_TICK) !== 0) return + this.stream._duplexState |= WRITE_NEXT_TICK + process.nextTick(updateWriteNT, this) + } +} + +class ReadableState { + constructor (stream, { highWaterMark = 16384, map = null, mapReadable, byteLength, byteLengthReadable } = {}) { + this.stream = stream + this.queue = new FIFO() + this.highWaterMark = highWaterMark + this.buffered = 0 + this.error = null + this.pipeline = null + this.byteLength = byteLengthReadable || byteLength || defaultByteLength + this.map = mapReadable || map + this.pipeTo = null + this.afterRead = afterRead.bind(this) + } + + pipe (pipeTo, cb) { + if (this.pipeTo !== null) throw new Error('Can only pipe to one destination') + + this.stream._duplexState |= READ_PIPE_DRAINED + this.pipeTo = pipeTo + this.pipeline = new Pipeline(this.stream, pipeTo, cb || null) + + if (cb) this.stream.on('error', noop) // We already error handle this so supress crashes + + if (isStreamx(pipeTo)) { + pipeTo._writableState.pipeline = this.pipeline + if (cb) pipeTo.on('error', noop) // We already error handle this so supress crashes + pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline)) // TODO: just call finished from pipeTo itself + } else { + const onerror = this.pipeline.done.bind(this.pipeline, pipeTo) + const onclose = this.pipeline.done.bind(this.pipeline, pipeTo, null) // onclose has a weird bool arg + pipeTo.on('error', onerror) + pipeTo.on('close', onclose) + pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline)) + } + + pipeTo.on('drain', afterDrain.bind(this)) + this.stream.emit('pipe', pipeTo) + } + + push (data) { + const stream = this.stream + + if (data === null) { + this.highWaterMark = 0 + stream._duplexState = (stream._duplexState | READ_ENDING) & READ_NON_PRIMARY_AND_PUSHED + return false + } + + if (this.map !== null) data = this.map(data) + this.buffered += this.byteLength(data) + this.queue.push(data) + + stream._duplexState = (stream._duplexState | READ_QUEUED) & READ_PUSHED + + return this.buffered < this.highWaterMark + } + + shift () { + const data = this.queue.shift() + + this.buffered -= this.byteLength(data) + if (this.buffered === 0) this.stream._duplexState &= READ_NOT_QUEUED + return data + } + + unshift (data) { + let tail + const pending = [] + + while ((tail === this.queue.shift()) !== undefined) { + pending.push(tail) + } + + this.push(data) + + for (let i = 0; i < pending.length; i++) { + this.queue.push(pending[i]) + } + } + + read () { + const stream = this.stream + + if ((stream._duplexState & READ_STATUS) === READ_QUEUED) { + const data = this.shift() + if ((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data) + if (this.pipeTo !== null && this.pipeTo.write(data) === false) stream._duplexState &= READ_PIPE_NOT_DRAINED + return data + } + + return null + } + + drain () { + const stream = this.stream + + while ((stream._duplexState & READ_STATUS) === READ_QUEUED && (stream._duplexState & READ_FLOWING) !== 0) { + const data = this.shift() + if 
((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data) + if (this.pipeTo !== null && this.pipeTo.write(data) === false) stream._duplexState &= READ_PIPE_NOT_DRAINED + } + } + + update () { + const stream = this.stream + + this.drain() + + while (this.buffered < this.highWaterMark && (stream._duplexState & SHOULD_NOT_READ) === 0) { + stream._duplexState |= READ_ACTIVE_AND_SYNC_AND_NEEDS_PUSH + stream._read(this.afterRead) + stream._duplexState &= READ_NOT_SYNC + if ((stream._duplexState & READ_ACTIVE) === 0) this.drain() + } + + if ((stream._duplexState & READ_READABLE_STATUS) === READ_EMIT_READABLE_AND_QUEUED) { + stream._duplexState |= READ_EMITTED_READABLE + stream.emit('readable') + } + + if ((stream._duplexState & READ_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary() + } + + updateNonPrimary () { + const stream = this.stream + + if ((stream._duplexState & READ_ENDING_STATUS) === READ_ENDING) { + stream._duplexState = (stream._duplexState | READ_DONE) & READ_NOT_ENDING + stream.emit('end') + if ((stream._duplexState & AUTO_DESTROY) === DONE) stream._duplexState |= DESTROYING + if (this.pipeTo !== null) this.pipeTo.end() + } + + if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) { + if ((stream._duplexState & ACTIVE) === 0) { + stream._duplexState |= ACTIVE + stream._destroy(afterDestroy.bind(this)) + } + return + } + + if ((stream._duplexState & OPEN_STATUS) === OPENING) { + stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING + stream._open(afterOpen.bind(this)) + } + } + + updateNextTick () { + if ((this.stream._duplexState & READ_NEXT_TICK) !== 0) return + this.stream._duplexState |= READ_NEXT_TICK + process.nextTick(updateReadNT, this) + } +} + +class TransformState { + constructor (stream) { + this.data = null + this.afterTransform = afterTransform.bind(stream) + this.afterFinal = null + } +} + +class Pipeline { + constructor (src, dst, cb) { + this.from = src + this.to = dst + this.afterPipe = cb + this.error = null + this.pipeToFinished = false + } + + finished () { + this.pipeToFinished = true + } + + done (stream, err) { + if (err) this.error = err + + if (stream === this.to) { + this.to = null + + if (this.from !== null) { + if ((this.from._duplexState & READ_DONE) === 0 || !this.pipeToFinished) { + this.from.destroy(new Error('Writable stream closed prematurely')) + } + return + } + } + + if (stream === this.from) { + this.from = null + + if (this.to !== null) { + if ((stream._duplexState & READ_DONE) === 0) { + this.to.destroy(new Error('Readable stream closed before ending')) + } + return + } + } + + if (this.afterPipe !== null) this.afterPipe(this.error) + this.to = this.from = this.afterPipe = null + } +} + +function afterDrain () { + this.stream._duplexState |= READ_PIPE_DRAINED + if ((this.stream._duplexState & READ_ACTIVE_AND_SYNC) === 0) this.updateNextTick() +} + +function afterFinal (err) { + const stream = this.stream + if (err) stream.destroy(err) + if ((stream._duplexState & DESTROY_STATUS) === 0) { + stream._duplexState |= WRITE_DONE + stream.emit('finish') + } + if ((stream._duplexState & AUTO_DESTROY) === DONE) { + stream._duplexState |= DESTROYING + } + + stream._duplexState &= WRITE_NOT_ACTIVE + this.update() +} + +function afterDestroy (err) { + const stream = this.stream + + if (!err && this.error !== STREAM_DESTROYED) err = this.error + if (err) stream.emit('error', err) + stream._duplexState |= DESTROYED + stream.emit('close') + + const rs = stream._readableState + const ws = stream._writableState + + if (rs !== 
null && rs.pipeline !== null) rs.pipeline.done(stream, err) + if (ws !== null && ws.pipeline !== null) ws.pipeline.done(stream, err) +} + +function afterWrite (err) { + const stream = this.stream + + if (err) stream.destroy(err) + stream._duplexState &= WRITE_NOT_ACTIVE + + if ((stream._duplexState & WRITE_DRAIN_STATUS) === WRITE_UNDRAINED) { + stream._duplexState &= WRITE_DRAINED + if ((stream._duplexState & WRITE_EMIT_DRAIN) === WRITE_EMIT_DRAIN) { + stream.emit('drain') + } + } + + if ((stream._duplexState & WRITE_SYNC) === 0) this.update() +} + +function afterRead (err) { + if (err) this.stream.destroy(err) + this.stream._duplexState &= READ_NOT_ACTIVE + if ((this.stream._duplexState & READ_SYNC) === 0) this.update() +} + +function updateReadNT (rs) { + rs.stream._duplexState &= READ_NOT_NEXT_TICK + rs.update() +} + +function updateWriteNT (ws) { + ws.stream._duplexState &= WRITE_NOT_NEXT_TICK + ws.update() +} + +function afterOpen (err) { + const stream = this.stream + + if (err) stream.destroy(err) + + if ((stream._duplexState & DESTROYING) === 0) { + if ((stream._duplexState & READ_PRIMARY_STATUS) === 0) stream._duplexState |= READ_PRIMARY + if ((stream._duplexState & WRITE_PRIMARY_STATUS) === 0) stream._duplexState |= WRITE_PRIMARY + stream.emit('open') + } + + stream._duplexState &= NOT_ACTIVE + + if (stream._writableState !== null) { + stream._writableState.update() + } + + if (stream._readableState !== null) { + stream._readableState.update() + } +} + +function afterTransform (err, data) { + if (data !== undefined && data !== null) this.push(data) + this._writableState.afterWrite(err) +} + +class Stream extends EventEmitter { + constructor (opts) { + super() + + this._duplexState = 0 + this._readableState = null + this._writableState = null + + if (opts) { + if (opts.open) this._open = opts.open + if (opts.destroy) this._destroy = opts.destroy + if (opts.predestroy) this._predestroy = opts.predestroy + } + } + + _open (cb) { + cb(null) + } + + _destroy (cb) { + cb(null) + } + + _predestroy () { + // does nothing + } + + get readable () { + return this._readableState !== null + } + + get writable () { + return this._writableState !== null + } + + get destroyed () { + return (this._duplexState & DESTROYED) !== 0 + } + + get destroying () { + return (this._duplexState & DESTROY_STATUS) !== 0 + } + + destroy (err) { + if ((this._duplexState & DESTROY_STATUS) === 0) { + if (!err) err = STREAM_DESTROYED + this._duplexState = (this._duplexState | DESTROYING) & NON_PRIMARY + this._predestroy() + if (this._readableState !== null) { + this._readableState.error = err + this._readableState.updateNextTick() + } + if (this._writableState !== null) { + this._writableState.error = err + this._writableState.updateNextTick() + } + } + } + + on (name, fn) { + if (this._readableState !== null) { + if (name === 'data') { + this._duplexState |= (READ_EMIT_DATA | READ_RESUMED) + this._readableState.updateNextTick() + } + if (name === 'readable') { + this._duplexState |= READ_EMIT_READABLE + this._readableState.updateNextTick() + } + } + + if (this._writableState !== null) { + if (name === 'drain') { + this._duplexState |= WRITE_EMIT_DRAIN + this._writableState.updateNextTick() + } + } + + return super.on(name, fn) + } +} + +class Readable extends Stream { + constructor (opts) { + super(opts) + + this._duplexState |= OPENING | WRITE_DONE + this._readableState = new ReadableState(this, opts) + + if (opts) { + if (opts.read) this._read = opts.read + } + } + + _read (cb) { + cb(null) + } + + pipe (dest, 
cb) { + this._readableState.pipe(dest, cb) + this._readableState.updateNextTick() + return dest + } + + read () { + this._readableState.updateNextTick() + return this._readableState.read() + } + + push (data) { + this._readableState.updateNextTick() + return this._readableState.push(data) + } + + unshift (data) { + this._readableState.updateNextTick() + return this._readableState.unshift(data) + } + + resume () { + this._duplexState |= READ_RESUMED + this._readableState.updateNextTick() + } + + pause () { + this._duplexState &= READ_PAUSED + } + + static _fromAsyncIterator (ite) { + let destroy + + const rs = new Readable({ + read (cb) { + ite.next().then(push).then(cb.bind(null, null)).catch(cb) + }, + predestroy () { + destroy = ite.return() + }, + destroy (cb) { + destroy.then(cb.bind(null, null)).catch(cb) + } + }) + + return rs + + function push (data) { + if (data.done) rs.push(null) + else rs.push(data.value) + } + } + + static from (data) { + if (data[asyncIterator]) return this._fromAsyncIterator(data[asyncIterator]()) + if (!Array.isArray(data)) data = data === undefined ? [] : [data] + + let i = 0 + return new Readable({ + read (cb) { + this.push(i === data.length ? null : data[i++]) + cb(null) + } + }) + } + + static isBackpressured (rs) { + return (rs._duplexState & READ_BACKPRESSURE_STATUS) !== 0 || rs._readableState.buffered >= rs._readableState.highWaterMark + } + + [asyncIterator] () { + const stream = this + + let error = null + let ended = false + let promiseResolve + let promiseReject + + this.on('error', (err) => { error = err }) + this.on('end', () => { ended = true }) + this.on('close', () => call(error, null)) + this.on('readable', () => call(null, stream.read())) + + return { + [asyncIterator] () { + return this + }, + next () { + return new Promise(function (resolve, reject) { + promiseResolve = resolve + promiseReject = reject + const data = stream.read() + if (data !== null) call(null, data) + }) + }, + return () { + stream.destroy() + } + } + + function call (err, data) { + if (promiseReject === null) return + if (err) promiseReject(err) + else if (data === null && !ended) promiseReject(STREAM_DESTROYED) + else promiseResolve({ value: data, done: data === null }) + promiseReject = promiseResolve = null + } + } +} + +class Writable extends Stream { + constructor (opts) { + super(opts) + + this._duplexState |= OPENING | READ_DONE + this._writableState = new WritableState(this, opts) + + if (opts) { + if (opts.writev) this._writev = opts.writev + if (opts.write) this._write = opts.write + if (opts.final) this._final = opts.final + } + } + + _writev (batch, cb) { + cb(null) + } + + _write (data, cb) { + this._writableState.autoBatch(data, cb) + } + + _final (cb) { + cb(null) + } + + static isBackpressured (ws) { + return (ws._duplexState & WRITE_BACKPRESSURE_STATUS) !== 0 + } + + write (data) { + this._writableState.updateNextTick() + return this._writableState.push(data) + } + + end (data) { + this._writableState.updateNextTick() + this._writableState.end(data) + } +} + +class Duplex extends Readable { // and Writable + constructor (opts) { + super(opts) + + this._duplexState = OPENING + this._writableState = new WritableState(this, opts) + + if (opts) { + if (opts.writev) this._writev = opts.writev + if (opts.write) this._write = opts.write + if (opts.final) this._final = opts.final + } + } + + _writev (batch, cb) { + cb(null) + } + + _write (data, cb) { + this._writableState.autoBatch(data, cb) + } + + _final (cb) { + cb(null) + } + + write (data) { + 
this._writableState.updateNextTick() + return this._writableState.push(data) + } + + end (data) { + this._writableState.updateNextTick() + this._writableState.end(data) + } +} + +class Transform extends Duplex { + constructor (opts) { + super(opts) + this._transformState = new TransformState(this) + + if (opts) { + if (opts.transform) this._transform = opts.transform + if (opts.flush) this._flush = opts.flush + } + } + + _write (data, cb) { + if (this._readableState.buffered >= this._readableState.highWaterMark) { + this._transformState.data = data + } else { + this._transform(data, this._transformState.afterTransform) + } + } + + _read (cb) { + if (this._transformState.data !== null) { + const data = this._transformState.data + this._transformState.data = null + cb(null) + this._transform(data, this._transformState.afterTransform) + } else { + cb(null) + } + } + + _transform (data, cb) { + cb(null, data) + } + + _flush (cb) { + cb(null) + } + + _final (cb) { + this._transformState.afterFinal = cb + this._flush(transformAfterFlush.bind(this)) + } +} + +function transformAfterFlush (err, data) { + const cb = this._transformState.afterFinal + if (err) return cb(err) + if (data !== null && data !== undefined) this.push(data) + this.push(null) + cb(null) +} + +function isStream (stream) { + return !!stream._readableState || !!stream._writableState +} + +function isStreamx (stream) { + return typeof stream._duplexState === 'number' && isStream(stream) +} + +function defaultByteLength (data) { + return Buffer.isBuffer(data) ? data.length : 1024 +} + +function noop () {} + +module.exports = { + isStream, + isStreamx, + Stream, + Writable, + Readable, + Duplex, + Transform +} + + +/***/ }), + +/***/ 746: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var pump = __webpack_require__(453) +var inherits = __webpack_require__(689) +var Duplexify = __webpack_require__(394) + +var toArray = function(args) { + if (!args.length) return [] + return Array.isArray(args[0]) ? args[0] : Array.prototype.slice.call(args) +} + +var define = function(opts) { + var Pumpify = function() { + var streams = toArray(arguments) + if (!(this instanceof Pumpify)) return new Pumpify(streams) + Duplexify.call(this, null, null, opts) + if (streams.length) this.setPipeline(streams) + } + + inherits(Pumpify, Duplexify) + + Pumpify.prototype.setPipeline = function() { + var streams = toArray(arguments) + var self = this + var ended = false + var w = streams[0] + var r = streams[streams.length-1] + + r = r.readable ? r : null + w = w.writable ? w : null + + var onclose = function() { + streams[0].emit('error', new Error('stream was destroyed')) + } + + this.on('close', onclose) + this.on('prefinish', function() { + if (!ended) self.cork() + }) + + pump(streams, function(err) { + self.removeListener('close', onclose) + if (err) return self.destroy(err.message === 'premature close' ? 
null : err) + ended = true + // pump ends after the last stream is not writable *but* + // pumpify still forwards the readable part so we need to catch errors + // still, so reenable autoDestroy in this case + if (self._autoDestroy === false) self._autoDestroy = true + self.uncork() + }) + + if (this.destroyed) return onclose() + this.setWritable(w) + this.setReadable(r) + } + + return Pumpify +} + +module.exports = define({autoDestroy:false, destroy:false}) +module.exports.obj = define({autoDestroy: false, destroy:false, objectMode:true, highWaterMark:16}) +module.exports.ctor = define + + +/***/ }), + +/***/ 747: +/***/ (function(module) { + +module.exports = require("fs"); + +/***/ }), + +/***/ 751: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var defer = __webpack_require__(500); + +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} + + +/***/ }), + +/***/ 755: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var utils = __webpack_require__(581); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var defaults = { + allowDots: false, + allowPrototypes: false, + arrayLimit: 20, + charset: 'utf-8', + charsetSentinel: false, + comma: false, + decoder: utils.decode, + delimiter: '&', + depth: 5, + ignoreQueryPrefix: false, + interpretNumericEntities: false, + parameterLimit: 1000, + parseArrays: true, + plainObjects: false, + strictNullHandling: false +}; + +var interpretNumericEntities = function (str) { + return str.replace(/&#(\d+);/g, function ($0, numberStr) { + return String.fromCharCode(parseInt(numberStr, 10)); + }); +}; + +// This is what browsers will submit when the ✓ character occurs in an +// application/x-www-form-urlencoded body and the encoding of the page containing +// the form is iso-8859-1, or when the submitted form has an accept-charset +// attribute of iso-8859-1. Presumably also with other charsets that do not contain +// the ✓ character, such as us-ascii. +var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') + +// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. +var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') + +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? 
undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; // Keep track of where the utf8 sentinel was found + var i; + + var charset = options.charset; + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + skipIndex = i; + i = parts.length; // The eslint settings do not allow break; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + var part = parts[i]; + + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset, 'key'); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); + val = options.decoder(part.slice(pos + 1), defaults.decoder, charset, 'value'); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) { + val = val.split(','); + } + + if (part.indexOf('[]=') > -1) { + val = isArray(val) ? [val] : val; + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options) { + var leaf = val; + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if (!options.parseArrays && cleanRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = options.depth > 0 && brackets.exec(key); + var parent = segment ? 
key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new Error('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + return { + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + // eslint-disable-next-line no-implicit-coercion, no-extra-parens + depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? 
Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; + + +/***/ }), + +/***/ 761: +/***/ (function(module) { + +module.exports = require("zlib"); + +/***/ }), + +/***/ 779: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ + +var db = __webpack_require__(852) +var extname = __webpack_require__(622).extname + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. 
+ * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} + + +/***/ }), + +/***/ 823: +/***/ (function(module) { + +async function handleResponse (response) { + const contentType = response.headers.get('Content-Type'); + const isJSON = contentType && contentType.match(/json/); + const data = isJSON ? await response.json() : await response.text(); + if (response.ok) return data; + else { + return isJSON + ? Promise.reject(new JSONHTTPError(response, data)) + : Promise.reject(new TextHTTPError(response, data)); + } +} + +class HTTPError extends Error { + constructor (response) { + super(response.statusText); + this.name = this.constructor.name; + if (typeof Error.captureStackTrace === 'function') { + Error.captureStackTrace(this, this.constructor); + } else { + this.stack = new Error(response.statusText).stack; + } + this.status = response.status; + } +} + +class TextHTTPError extends HTTPError { + constructor (response, data) { + super(response); + this.data = data; + } +} + +class JSONHTTPError extends HTTPError { + constructor (response, json) { + super(response); + this.json = json; + } +} + +module.exports = { + handleResponse, + HTTPError, + TextHTTPError, + JSONHTTPError +}; + + +/***/ }), + +/***/ 831: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + + for (var key in obj) { + keys.push(key); + } + + return keys; +}; +/**/ + + +module.exports = Duplex; + +var Readable = __webpack_require__(226); + +var Writable = __webpack_require__(241); + +__webpack_require__(689)(Duplex, Readable); + +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); // the no-half-open enforcer + +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; // no more data can be written. + // But allow more writes to happen in this tick. + + process.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 835: +/***/ (function(module) { + +module.exports = require("url"); + +/***/ }), + +/***/ 852: +/***/ (function(module, __unusedexports, __webpack_require__) { + +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = __webpack_require__(512) + + +/***/ }), + +/***/ 882: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + +module.exports = PassThrough; + +var Transform = __webpack_require__(925); + +__webpack_require__(689)(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 892: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var iterate = __webpack_require__(157) + , initState = __webpack_require__(147) + , terminator = __webpack_require__(939) + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); +} + +/* + * -- Sort methods + */ + +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 
1 : 0; +} + +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} + + +/***/ }), + +/***/ 896: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } + +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + +var _require = __webpack_require__(293), + Buffer = _require.Buffer; + +var _require2 = __webpack_require__(669), + inspect = _require2.inspect; + +var custom = inspect && inspect.custom || 'inspect'; + +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} + +module.exports = +/*#__PURE__*/ +function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + 
p.data; + + while (p = p.next) { + ret += s + p.data; + } + + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + + return ret; + } // Consumes a specified amount of bytes or characters from the buffered data. + + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } // Consumes a specified amount of characters from the buffered data. + + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Consumes a specified amount of bytes from the buffered data. + + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Make sure the linked list only shows the minimal necessary information. + + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread({}, options, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + + return BufferList; +}(); + +/***/ }), + +/***/ 897: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var utils = __webpack_require__(581); +var formats = __webpack_require__(13); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var isArray = Array.isArray; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? 
valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaultFormat = formats['default']; +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + format: defaultFormat, + formatter: formats.formatters[defaultFormat], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var isNonNullishPrimitive = function isNonNullishPrimitive(v) { + return typeof v === 'string' + || typeof v === 'number' + || typeof v === 'boolean' + || typeof v === 'symbol' + || typeof v === 'bigint'; +}; + +var stringify = function stringify( + object, + prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset +) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = obj.join(','); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key') : prefix; + } + + obj = ''; + } + + if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key'); + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value'))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (isArray(obj)) { + pushToArray(values, stringify( + obj[key], + typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } else { + pushToArray(values, stringify( + obj[key], + prefix + (allowDots ? '.' 
+ key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + options.strictNullHandling, + options.skipNulls, + options.encode ? 
options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.formatter, + options.encodeValuesOnly, + options.charset + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; + + +/***/ }), + +/***/ 917: +/***/ (function(module, __unusedexports, __webpack_require__) { + + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = __webpack_require__(669).deprecate; + + +/***/ }), + +/***/ 925: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. 
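+//
+// (Editor's illustrative aside, not part of the bundled source: the options
+// form accepted by the constructor below is the shortest way to see the above
+// in action, e.g.
+//
+//   const upper = new Transform({
+//     transform (chunk, encoding, callback) {
+//       callback(null, String(chunk).toUpperCase());
+//     }
+//   });
+//
+// each written chunk is transformed and pushed out on the readable side.)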
+// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + +module.exports = Transform; + +var _require$codes = __webpack_require__(563).codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; + +var Duplex = __webpack_require__(831); + +__webpack_require__(689)(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + + ts.writechunk = null; + ts.writecb = null; + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; // start out asking for a readable event once data is transformed. + + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } // When the writable side finishes, then flush out anything remaining. + + + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; // This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. 
+// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. + + +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; // Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. + + +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} + +/***/ }), + +/***/ 928: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var CombinedStream = __webpack_require__(547); +var util = __webpack_require__(669); +var path = __webpack_require__(622); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var parseUrl = __webpack_require__(835).parse; +var fs = __webpack_require__(747); +var mime = __webpack_require__(779); +var asynckit = __webpack_require__(334); +var populate = __webpack_require__(69); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
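+ *
+ * Editor's note (illustrative only, not part of the bundled source): typical
+ * usage of the API defined below, with placeholder paths and URL:
+ *
+ *   const form = new FormData();
+ *   form.append('my_field', 'my value');
+ *   form.append('my_file', fs.createReadStream('/foo/bar.jpg'));
+ *   form.submit('http://example.org/upload', function (err, res) {
+ *     if (!err) res.resume();
+ *   });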
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
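+    // (Editor's illustrative aside, not part of the bundled source: the
+    // asynchronous counterpart defined below is the supported route, e.g.
+    //
+    //   form.getLength(function (err, length) {
+    //     if (!err) request.setHeader('Content-Length', length);
+    //   });
+    //
+    // `form` and `request` are placeholders; submit() below does the same.)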
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; + + +/***/ }), + +/***/ 939: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var abort = __webpack_require__(566) + , async = __webpack_require__(751) + ; + +// API +module.exports = terminator; + +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); +} + + +/***/ }), + +/***/ 979: +/***/ (function(module) { + +/** + * Simple timer lets you record start and stop times, with an 
elapsed time getter. + */ +class SimpleTimer { + constructor (startTime) { + this.start = startTime || Date.now() + this.end = null + this.stopped = false + } + + get elapsed () { + if (this.stopped) { + return this.end - this.start + } else { + return Date.now() - this.start + } + } + + stop () { + if (this.stopped) return + this.stopped = true + this.end = Date.now() + } + + toString () { + return this.elapsed + } + + toJSON () { + return this.elapsed + } +} + +module.exports = SimpleTimer + + +/***/ }) + +/******/ }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/index.js.map b/dist/index.js.map new file mode 100644 index 0000000..047e8cf --- /dev/null +++ b/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["/webpack/bootstrap","../node_modules/end-of-stream/index.js","../node_modules/wrappy/wrappy.js","../node_modules/qs/lib/formats.js","../(webpack)/ncc/@@notfound.js","../node_modules/readable-stream/lib/internal/streams/async_iterator.js","../node_modules/once/once.js","../node_modules/form-data/lib/populate.js","../external \"os\"","../node_modules/asynckit/serial.js","../node_modules/async-neocities/lib/folder-diff.js","../index.js","../node_modules/async-neocities/lib/stream-meter.js","../node_modules/fast-fifo/index.js","../node_modules/asynckit/lib/state.js","../node_modules/safe-buffer/index.js","../node_modules/delayed-stream/lib/delayed_stream.js","../node_modules/asynckit/lib/iterate.js","../node_modules/readable-stream/lib/internal/streams/from.js","../external \"https\"","../node_modules/readable-stream/lib/internal/streams/state.js","../node_modules/readable-stream/lib/_stream_readable.js","../node_modules/readable-stream/lib/internal/streams/destroy.js","../node_modules/readable-stream/lib/internal/streams/pipeline.js","../node_modules/readable-stream/lib/_stream_writable.js","../node_modules/async-neocities/index.js","../node_modules/readable-stream/lib/internal/streams/end-of-stream.js","../external \"buffer\"","../node_modules/inherits/inherits_browser.js","../node_modules/ms/index.js","../node_modules/asynckit/index.js","../node_modules/qs/lib/index.js","../node_modules/duplexify/index.js","../external \"stream\"","../external \"crypto\"","../node_modules/async-neocities/lib/stats-handler.js","../node_modules/asynckit/parallel.js","../node_modules/readable-stream/lib/internal/streams/stream.js","../node_modules/@actions/core/lib/command.js","../node_modules/pump/index.js","../node_modules/node-fetch/lib/index.js","../node_modules/@actions/core/lib/core.js","../node_modules/stream-shift/index.js","../node_modules/nanoassert/index.js","../node_modules/asynckit/lib/defer.js","../node_modules/combined-stream/lib/combined_stream.js","../node_modules/readable-stream/errors.js","../node_modules/asynckit/lib/abort.js","../node_modules/readable-stream/readable.js","../node_modules/qs/lib/utils.js","../node_modules/pretty-bytes/index.js","../external \"http\"","../external \"events\"","../external \"path\"","../external \"util\"","../node_modules/string_decoder/lib/string_decoder.js","../node_modules/inherits/inherits.js","../node_modules/async-folder-walker/index.js","../node_modules/fast-fifo/fixed-size.js","../node_modules/streamx/index.js","../node_modules/pumpify/index.js","../external \"fs\"","../node_modules/asynckit/lib/async.js","../node_modules/qs/lib/parse.js","../external 
\"zlib\"","../node_modules/mime-types/index.js","../node_modules/fetch-errors/index.js","../node_modules/readable-stream/lib/_stream_duplex.js","../external \"url\"","../node_modules/mime-db/index.js","../node_modules/readable-stream/lib/_stream_passthrough.js","../node_modules/asynckit/serialOrdered.js","../node_modules/readable-stream/lib/internal/streams/buffer_list.js","../node_modules/qs/lib/stringify.js","../node_modules/util-deprecate/node.js","../node_modules/readable-stream/lib/_stream_transform.js","../node_modules/form-data/lib/form_data.js","../node_modules/asynckit/lib/terminator.js","../node_modules/async-neocities/lib/timer.js"],"names":[],"mappings":";;;AAAA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;AAGA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;;;;;;;ACrCA,WAAW,mBAAO,CAAC,EAAM;;AAEzB;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,EAAE,4BAA4B;AAC9B;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,cAAc;;;;;;;;AC7Fd;AACA;AACA;AACA;AACA;AACA,cAAc;AACd;AACA;;AAEA;AACA;;AAEA;AACA;AACA,GAAG;;AAEH;;AAEA;AACA;AACA,mBAAmB,iBAAiB;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;;;;;;;;;AChCa;;AAEb;AACA;;AAEA,WAAW,mBAAO,CAAC,GAAS;;AAE5B;AACA;AACA;AACA;;AAEA,cAAc;AACd;AACA;AACA;AACA;AACA;AACA,aAAa;AACb;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;;;;;;;;ACzBA,eAAuB;;;;;;;;;ACAV;;AAEb;;AAEA,2CAA2C,kBAAkB,kCAAkC,qEAAqE,EAAE,EAAE,OAAO,kBAAkB,EAAE,YAAY;;AAE/M,eAAe,mBAAO,CAAC,GAAiB;;AAExC;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA,oCAAoC;AACpC;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,KAAK;AACL;AACA;;AAEA,iEAAiE;AACjE;AACA;AACA;AACA,GAAG;;AAEH;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW;AACX;AACA;AACA,SAAS;AACT,OAAO;AACP,KAAK;AACL;AACA;AACA;;;AAGA;AACA;;AAEA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,CAAC;AACD;AACA,CAAC;AACD;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,KAAK;AACL,GAAG;AACH,CAAC;;AAED;AACA;;AAEA,yFAAyF;AACzF;AACA;AACA,GAAG;AACH;AACA;AACA,GAAG;AACH;AACA;AACA,GAAG;AACH;AACA;AACA,GAAG;AACH;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA,KAAK;AACL;AACA,GAAG;AACH;AACA;AACA;AACA,yCAAyC;AACzC;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;AACA;AACA;;AAEA,cAAc,qC;;;;;;;AC9Md,aAAa,mBAAO,CAAC,EAAQ;AAC7B,cAAc;AACd,cAAc;;AAEd;AACA;AACA;AACA;AACA,KAAK;AACL;AACA,GAAG;;AAEH;AACA;AACA;AACA,KAAK;AACL;AACA,GAAG;AACH,CAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA,cAAc;;AAEd;AACA;AACA;AACA,GAAG;;AAEH;AACA;;;;;;;;ACTA,+B;;;;;;;ACAA,oBAAoB,mBAAO,CAAC,GAAoB;;AAEhD;AACA,cAAc;;AAEd;AACA;AACA;AACA,aAAa,aAAa;AAC1B,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB;AACA;AACA;AACA;AACA;;;;;;;;AChBA,eAAe,mBAAO,CAAC,GAAQ;AAC/B,aAAa,mBAAO,CAAC,GAAM;AAC3B,WAAW,mBAAO,CAAC,GAAI;;AAEvB,6BAA6B,mBAAO,CAAC,GAAM;;AAE3C;AACA;AACA,YAAY,MAAM;AAClB,YAAY,MAAM;AAClB,YAAY,gBAAgB;AAC5B;AACA;AACA;AACA;;AAEA;AACA,kCAAkC,sBAAsB;AACxD;;AAEA;AACA,qCAAqC,8CAA8C;AACnF;;AAEA;AACA;;AAEA;AACA;;AAEA
;AACA;AACA;;AAEA,0CAA0C,mBAAmB;;AAE7D;AACA,yCAAyC,mBAAmB;;AAE5D;AACA;;AAEA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA;;AAEA,cAAc;AACd;AACA;;AAEA;AACA;AACA,YAAY,OAAO;AACnB,YAAY,gBAAgB;AAC5B;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA,YAAY,IAAI;AAChB,YAAY,IAAI;AAChB,YAAY,IAAI;AAChB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,YAAY,IAAI;AAChB,YAAY,IAAI;AAChB,YAAY,IAAI;AAChB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,YAAY,OAAO;AACnB,YAAY,OAAO;AACnB;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAM;AACN;AACA;AACA;AACA;AACA;AACA;AACA,MAAM;AACN;AACA;AACA;AACA;AACA;AACA;AACA,MAAM;AACN;AACA;AACA;AACA;AACA;AACA;AACA,MAAM;AACN;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAM;AACN;AACA;AACA,IAAI;;;;;;;;AClLJ,aAAa,mBAAO,CAAC,GAAe;AACpC;AACA,kBAAkB,mBAAO,CAAC,GAAiB;AAC3C,aAAa,mBAAO,CAAC,GAAM;AAC3B,WAAW,mBAAO,CAAC,GAAI;AACvB,eAAe,mBAAO,CAAC,GAAY;AACnC,YAAY,mBAAO,CAAC,GAAI;;AAExB;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA,GAAG;;AAEH,0CAA0C,eAAe;AACzD,8BAA8B,2BAA2B;AACzD,qBAAqB,iCAAiC,GAAG,2BAA2B;AACpF,6BAA6B,0BAA0B;AACvD;;AAEA;AACA;AACA;AACA,CAAC;;;;;;;;ACjCD,OAAO,sBAAsB,GAAG,mBAAO,CAAC,GAAS;AACjD,aAAa,mBAAO,CAAC,GAAM;AAC3B,gBAAgB,mBAAO,CAAC,GAAS;;AAEjC;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,GAAG;;AAEH;AACA;AACA;AACA;AACA,KAAK;AACL,GAAG;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;;AAEA,cAAc;AACd;AACA;AACA;;;;;;;;ACrCA,kBAAkB,mBAAO,CAAC,GAAc;;AAExC,cAAc;AACd;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;;;;;;;AC/BA;AACA,cAAc;;AAEd;AACA;AACA;AACA;AACA,aAAa,aAAa;AAC1B,aAAa,cAAc;AAC3B;AACA,aAAa,OAAO;AACpB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mBAAmB;AACnB,iCAAiC;AACjC;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;;AAEA;AACA;;;;;;;;ACpCA;AACA,aAAa,mBAAO,CAAC,GAAQ;AAC7B;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,EAAE,cAAc;AAChB,CAAC;AACD;AACA,oBAAoB,OAAO;AAC3B,EAAE,OAAO;AACT;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/DA,aAAa,mBAAO,CAAC,GAAQ;AAC7B,WAAW,mBAAO,CAAC,GAAM;;AAEzB,cAAc;AACd;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA,kCAAkC;AAClC;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA,YAAY,mBAAO,CAAC,GAAY;AAChC,YAAY,mBAAO,CAAC,GAAY;AAChC;;AAEA;AACA,cAAc;;AAEd;AACA;AACA;AACA,WAAW,aAAa;AACxB,WAAW,SAAS;AACpB,WAAW,OAAO;AAClB,WAAW,SAAS;AACpB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,GAAG;AACH;;AAEA;AACA;AACA;AACA,aAAa,SAAS;AACtB,aAAa,cAAc;AAC3B,aAAa,MAAM;AACnB,aAAa,SAAS;AACtB,aAAa,eAAe;AAC5B;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;;;;;;;;AC1Ea;;AAEb,4EAA4E,MAAM,0BAA0B,wBAAwB,EAAE,gBAAgB,eAAe,QAAQ,EAAE,iBAAiB,gBAAgB,EAAE,OAAO,4CAA4C,EAAE;;AAEvQ,gCAAgC,qBAAqB,mCAAmC,gDAAgD,gCAAgC,wBAAwB,wEAAwE,EAAE,uBAAuB,uEAAuE,EAAE,kBAAkB,EAAE,EAAE,GAAG;;AAEnY,0CAA0C,g
CAAgC,oCAAoC,oDAAoD,8DAA8D,gEAAgE,EAAE,EAAE,gCAAgC,EAAE,aAAa;;AAEnV,gCAAgC,gBAAgB,sBAAsB,OAAO,uDAAuD,aAAa,uDAAuD,2CAA2C,EAAE,EAAE,EAAE,6CAA6C,2EAA2E,EAAE,OAAO,iDAAiD,kFAAkF,EAAE,EAAE,EAAE,EAAE,eAAe;;AAEphB,2CAA2C,kBAAkB,kCAAkC,qEAAqE,EAAE,EAAE,OAAO,kBAAkB,EAAE,YAAY;;AAE/M,2BAA2B,mBAAO,CAAC,GAAiB;;AAEpD;AACA;;AAEA;AACA;AACA,GAAG,kGAAkG,uFAAuF;;AAE5L;AACA;AACA,GAAG,SAAS;AACZ;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,SAAS;AACT;AACA,SAAS;AACT;AACA;AACA,OAAO;AACP;AACA;AACA,KAAK;AACL;AACA;;AAEA;AACA;;AAEA,cAAc,Q;;;;;;;AC/Dd,kC;;;;;;;;ACAa;;AAEb,4BAA4B,mBAAO,CAAC,GAAiB;;AAErD;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA,GAAG;;;AAGH;AACA;;AAEA,cAAc;AACd;AACA,E;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACa;;AAEb,cAAc;AACd;;AAEA;AACA;;AAEA;AACA;;AAEA,SAAS,mBAAO,CAAC,GAAQ;;AAEzB;AACA;AACA;AACA;;AAEA;;;AAGA,aAAa,mBAAO,CAAC,GAA2B;AAChD;;;AAGA,aAAa,mBAAO,CAAC,GAAQ;;AAE7B;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;;AAGA,gBAAgB,mBAAO,CAAC,GAAM;;AAE9B;;AAEA;AACA;AACA,CAAC;AACD;AACA;AACA;;;AAGA,iBAAiB,mBAAO,CAAC,GAAgC;;AAEzD,kBAAkB,mBAAO,CAAC,GAA4B;;AAEtD,eAAe,mBAAO,CAAC,GAA0B;AACjD;;AAEA,qBAAqB,mBAAO,CAAC,GAAW;AACxC;AACA;AACA;AACA,2FAA2F;;;AAG3F;AACA;AACA;;AAEA,mBAAO,CAAC,GAAU;;AAElB;AACA;;AAEA;AACA;AACA;AACA,+FAA+F;AAC/F;AACA;AACA;;AAEA,yEAAyE,mFAAmF;AAC5J;;AAEA;AACA,qBAAqB,mBAAO,CAAC,GAAkB;AAC/C,0BAA0B;AAC1B;AACA;AACA;AACA;;AAEA,yEAAyE;AACzE;;AAEA;AACA,kFAAkF;AAClF;;AAEA,0FAA0F;AAC1F;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,uBAAuB;AACvB;AACA;AACA;;AAEA,mBAAmB;AACnB;;AAEA;AACA;AACA;AACA;AACA,qBAAqB;;AAErB,+CAA+C;;AAE/C,2CAA2C;;AAE3C,yBAAyB;AACzB;AACA;;AAEA,2DAA2D;;AAE3D,sBAAsB;;AAEtB;AACA;AACA;;AAEA;AACA,wCAAwC,mBAAO,CAAC,GAAiB;AACjE;AACA;AACA;AACA;;AAEA;AACA,qBAAqB,mBAAO,CAAC,GAAkB;AAC/C,gEAAgE;AAChE;;AAEA;AACA,mEAAmE;;AAEnE;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;;;AAGA;AACA;AACA,CAAC;AACD;AACA;;AAEA;AACA;AACA,EAAE;AACF;AACA;AACA;;;AAGA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA,EAAE;;;AAGF;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA,+FAA+F;AAC/F,OAAO;AACP;AACA,OAAO;AACP;AACA,OAAO;AACP;;AAEA;AACA;AACA,4FAA4F;AAC5F,SAAS;AACT;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA,GAAG;AACH;AACA;;;AAGA;AACA;;AAEA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA,gDAAgD;AAChD;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA,EAAE;;;AAGF;AACA,sCAAsC,mBAAO,CAAC,GAAiB;AAC/D;AACA,wCAAwC;;AAExC,sEAAsE;;AAEtE;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA,EAAE;;;AAGF;;AAEA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,CAAC;AACD;;;AAGA;AACA;AACA;;AAEA;AACA;AACA,4EAA4E;AAC5E,GAAG;;;AAGH;AACA,kCAAkC;;AAElC;AACA;AACA;AACA;;AAEA;AACA,CAAC;;;AAGD;AACA;AACA;AACA;AACA;AACA,6CAA6C;AAC7C;AACA;;AAEA;AACA;AACA,6DAA6D;AAC7D;AACA;;AAEA,8BAA8B;;AAE9B;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAGA;AACA,iCAAiC;;AAEjC;AACA;AACA;AACA,GAAG;AACH;;;AAGA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA,sBAAsB;;AAEtB,sDAAsD;;AAEtD;;AAEA,uBAAuB;AACvB;;AAEA;AACA;;AAEA;AACA,sCAAsC;;AAEtC;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA,gDAAgD;;AAEhD;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AAC
A;AACA;AACA;AACA,CAAC;AACD;AACA;;;AAGA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;;AAGA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA;AACA;;;AAGA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,CAAC;AACD;AACA;AACA;;;AAGA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,gDAAgD;AAChD;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;;;AAGA;AACA;AACA;;AAEA;AACA,qBAAqB;;AAErB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,qBAAqB;AACrB;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,GAAG;AACH;;;AAGA;AACA;AACA;AACA;AACA;AACA,GAAG;;;AAGH,0CAA0C;;AAE1C;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA,GAAG;;;AAGH,yBAAyB;;AAEzB;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,IAAI;;AAEJ,0CAA0C;;AAE1C;AACA;AACA;AACA,kCAAkC;;AAElC;AACA;AACA;AACA;AACA;AACA,GAAG;;;AAGH;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,mBAAmB,SAAS;AAC5B;AACA;AACA,OAAO;AACP;;AAEA;AACA,GAAG;;;AAGH;AACA;AACA;AACA;AACA;AACA;AACA;AACA,EAAE;AACF;;;AAGA;AACA;AACA;;AAEA;AACA;AACA;AACA,iEAAiE;;AAEjE;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,yBAAyB;AACzB,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA,CAAC;AACD;;;AAGA;AACA;;AAEA;AACA,oBAAoB;AACpB;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,CAAC;AACD;AACA;;;AAGA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;AACA;AACA,0DAA0D;;AAE1D,4EAA4E;;AAE5E;;AAEA;AACA;AACA;AACA;AACA,GAAG,EAAE;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA,GAAG;;;AAGH,iBAAiB,yBAAyB;AAC1C;AACA,GAAG;AACH;;;AAGA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,0CAA0C,mBAAO,CAAC,EAAmC;AACrF;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA,CAAC,EAAE;;AAEH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC,EAAE;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,mDAAmD;AACnD;AACA,mDAAmD,+DAA+D;AAClH;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA,yDAAyD;;AAEzD;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,aAAa,mBAAO,CAAC,GAAyB;AAC9C;;AAEA;AACA;AACA;;AAEA;AACA,gCAAgC,OAAO;AACvC;AACA;;AAEA;AACA,C;;;;;;;;ACnmCa;;AAEb;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;;;AAGA;AACA;AACA,GAAG;;;AAGH;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA,OAAO;AACP;AACA;AACA,KAAK;AACL;AACA;AACA,KAAK;AACL;AACA;AACA,GAAG;;AAEH;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,wFAAwF;AACxF;;AAEA,cAAc;AACd;AACA;AACA;AACA,E;;;;;;;;ACxGA;AACA;AACa;;AAEb;;AAEA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;;AAEA,qBAAqB,mBAAO,CAAC,GAAiB;AAC9C;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH,+BAA+B,mBAAO,CAAC,GAAiB;AACxD;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA,qBAAqB;;AAErB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA,wEAAwE,aAAa;AACrF;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL,GAAG;AACH;AACA;;AAEA,cAAc,Y;;;;;;;;AChGd;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACa;;AAEb,cAAc;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA,aAAa,mBAAO,CAAC,GAAgB;AACrC;AACA;;AAEA;;AAEA,aAAa,mBAAO,CAAC,GAA2B;AAChD;;;AAGA,aAAa,mBAAO,CAAC,GAAQ;;AAE7B;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA,kBAAkB,mBAAO,CAAC,GAA4B;;AAEtD,eAAe,mBAAO,CAAC,GAA0B;AACjD;;AAEA,qBAAqB,mBAAO,CAAC,GAAW;AACxC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA,mBAAO,CAAC,GAAU;;AAElB;;AAEA;AACA,qBAAqB,mBAAO,CAAC,GAAkB;AAC/C,0BAA0B;AAC1B;AACA;AACA;AACA;;AAEA,yEAAyE;AACzE;;AAEA;AACA,kFAAkF;AAClF;AACA;;AAEA,0FAA0F;;AAE1F,2BAA2B;;AAE3B,yBAAyB;;AAEzB,sBAAsB;;AAEtB,qBAAqB;;AAErB,wBAAwB;;AAExB,yBAAyB;AACzB;AACA;;AAEA;AACA,iCAAiC;AACjC;AACA;;AAEA,2DAA2D;AAC3D;AACA;;AAEA,kBAAkB;;AAElB,uBAAuB;;AAEvB,kBAAkB;AAClB;AACA;AACA;;AAEA,mBAAmB;AACnB;AACA;;AAEA,gCAAgC;;AAEhC;AACA;AACA,IAAI;;;AAGJ,sBAAsB;;AAEtB;AACA;AACA,kCAAkC;AAClC;;AAEA,qBAAqB;AACrB;;AAEA,2BAA2B;;AAE3B,4BAA4B;;AAE5B,+CAA+C;;AAE/C,2CAA2C;;AAE3C,gCAAgC;AAChC;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP,KAAK;AACL,GAAG;AACH,CAAC,IAAI;AACL;;;AAGA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH,CAAC;AACD;AACA;AACA;AACA;;AAEA;AACA,qBAAqB,mBAAO,CAAC,GAAkB,EAAE;AACjD;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,mEAAmE;;AAEnE;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,CAAC;;;AAGD;AACA;AACA;;AAEA;AACA,4CAA4C;;AAE5C;AACA;AACA,CAAC;AACD;AACA;;;AAGA;AACA;;AAEA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA,iCAAiC;AACjC;AACA,4CAA4C;AAC5C;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC,EAAE;AACH;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,+CAA+C;;AAE/C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,KAAK;AACL;AACA;;AAEA;AACA,GAAG;AACH;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,wEAAwE,sDAAsD;AAC9H;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,6BAA6B;AAC7B;;AAEA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA,+BAA+B;AAC/B;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,oDAAoD;AACpD;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD;AACA;;;AAGA;AACA;AACA;AACA;AACA;AACA,CAAC;;;AAGD;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,0EAA0E;AAC1E;;AAEA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;;AAEA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mCAAmC;AACnC;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;;AAEA,yEAAyE;;AAEzE;AACA;AACA;AACA,GAAG;;;AAGH;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;;AAE
D;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA,6CAA6C;AAC7C;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,GAAG;;;AAGH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;;;AAGA;AACA;AACA,CAAC;AACD;AACA;;AAEA;AACA;AACA,E;;;;;;;ACxrBA,OAAO,iBAAiB,GAAG,mBAAO,CAAC,GAAc;AACjD,OAAO,mBAAmB,GAAG,mBAAO,CAAC,GAAI;AACzC,YAAY,mBAAO,CAAC,GAAqB;AACzC,iBAAiB,mBAAO,CAAC,GAAW;AACpC,eAAe,mBAAO,CAAC,GAAY;AACnC,cAAc,mBAAO,CAAC,GAAY;AAClC,OAAO,MAAM,GAAG,mBAAO,CAAC,GAAK;AAC7B,WAAW,mBAAO,CAAC,GAAI;AACvB,WAAW,mBAAO,CAAC,EAAI;;AAEvB,OAAO,qBAAqB,GAAG,mBAAO,CAAC,EAAmB;AAC1D,YAAY,mBAAO,CAAC,GAAgB;AACpC,oBAAoB,mBAAO,CAAC,GAAa;AACzC,OAAO,+BAA+B,GAAG,mBAAO,CAAC,GAAoB;AACrE,qBAAqB,mBAAO,CAAC,GAAqB;;AAElD;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,gBAAgB;AAC9B;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,KAAK;;AAEL;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA,iCAAiC;;AAEjC;AACA;AACA,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;;AAEL;AACA;AACA;;AAEA;AACA;AACA,+BAA+B,YAAY;AAC3C;AACA,uCAAuC,YAAY,IAAI,UAAU;AACjE;AACA;;AAEA;AACA;AACA,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB;AACA;AACA;AACA;AACA;AACA,KAAK;AACL,mCAAmC;;AAEnC,uBAAuB,SAAS;AAChC,8BAA8B,uBAAuB;;AAErD;AACA;AACA;;AAEA;AACA;AACA,cAAc,OAAO;AACrB,cAAc,QAAQ,4BAA4B,EAAE;AACpD,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB;AACA;AACA;AACA;;AAEA;AACA;AACA,kBAAkB,cAAc;AAChC;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA,KAAK;;AAEL;AACA,QAAQ;AACR;AACA;AACA;;AAEA,gCAAgC,SAAS;AACzC;AACA;;AAEA;AACA;AACA;AACA,0BAA0B;AAC1B;AACA,uBAAuB;AACvB;AACA;AACA,oCAAoC,aAAa;AACjD;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;;AAEL,6CAA6C,wBAAwB;AACrE;;AAEA;AACA;AACA;AACA,8BAA8B;AAC9B;AACA;AACA;AACA,uBAAuB;AACvB;AACA;;AAEA;AACA;AACA;AACA,KAAK;;AAEL,6CAA6C,wBAAwB;AACrE;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA,cAAc,OAAO;AACrB,cAAc,QAAQ;AACtB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,QAAQ;AACtB,cAAc,QAAQ;AACtB,cAAc,QAAQ;AACtB;AACA;AACA;AACA;AACA,uBAAuB;AACvB;AACA;;AAEA;AACA;;AAEA;AACA,aAAa,mCAAmC;AAChD;AACA,+BAA+B,iBAAiB;AAChD;AACA;AACA,aAAa,kCAAkC;;AAE/C;AACA,aAAa,gCAAgC;AAC7C,WAAW,6CAA6C;AACxD,aAAa,+BAA+B;;AAE5C;AACA;AACA,eAAe,gCAAgC;AAC/C;AACA;;AAEA,aAAa,iCAAiC;AAC9C;;AAEA;AACA;AACA,kBAAkB,2BAA2B;AAC7C;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW;AACX;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA,SAAS;AACT,OAAO;AACP;AACA;;AAEA;AACA;AACA;;AAEA;AACA,aAAa,gCAAgC;;AAE7C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,cAAc;;;;;;;;;AC9Sd;AACA;AACa;;AAEb,iCAAiC,mBAAO,CAAC,GAAiB;;AAE1D;AACA;AACA;AACA;AACA;;AAEA,uEAAuE,aAAa;AACpF;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,gCAAgC;AAChC,GAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,cAAc,O;;;;;;;ACvGd,mC;;;;;;;ACAA;AACA;AACA,EAAE,cAAc;AAChB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA,OAAO;AACP;AACA;AACA,CAAC;AACD;AACA,EAAE,cAAc;AAChB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW,cAAc;AACzB,WAAW,OAAO;AAClB,YAAY,MAAM;AAClB,YAAY;AACZ;AACA;;AAEA,cAAc;AACd;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,WAAW,OAAO;AAClB,YAAY;AACZ;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,WAAW,OAAO;AAClB,YAAY;AACZ;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,WAAW,OAAO;AAClB,YAAY;AACZ;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;;;;;;;ACjKA,cAAc;AACd;AACA,kBAAkB,mBAAO,CAAC,GAAe;AACzC,kBAAkB,mBAAO,CAAC,EAAa;AACvC,kBAAkB,mBAAO,CAAC,GAAoB;AAC9C;;;;;;;;;ACLa;;AAEb,gBAAgB,mBAAO,CAAC,GAAa;AACrC,YAAY,mBAAO,CAAC,GAAS;AAC7B,cAAc,mBAAO,CAAC,EAAW;;AAEjC,cAAc;AACd;AACA;AACA;AACA;;;;;;;;ACVA,aAAa,mBAAO,CAAC,GAAiB;AACtC,UAAU,mBAAO,CAAC,CAAe;AACjC,eAAe,mBAAO,CAAC,GAAU;AACjC,YAAY,mBAAO,CAAC,GAAc;;AAElC;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA,gCAAgC,kCAAkC;AAClE;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,6BAA6B,8BAA8B;;AAE3D;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA,6BAA6B,8BAA8B;;AAE3D;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,GAAG;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL,GAAG;AACH;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,cAAc;;;;;;;;AC7Od,mC;;;;;;;ACAA,mC;;;;;;;ACAA,oBAAoB,mBAAO,CAAC,GAAc;;AAE1C;AACA;AACA;AACA;AACA;;AAEA,gCAAgC;AAChC;AACA;;AAEA;AACA;AACA;AACA,gCAAgC,YAAY;AAC5C;AACA;AACA;AACA,gCAAgC,YAAY;AAC5C;AACA;AACA;AACA,gCAAgC,YAAY;AAC5C;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,2BAA2B,YAAY,IAAI,kCAAkC;AAC7E;AACA,+BAA+B,gCAAgC,KAAK,8BAA8B;AAClG;AACA;AACA;AACA;;AAEA,cAAc;;;;;;;;AC3Dd,iBAAiB,mBAAO,CAAC,GAAkB;AAC3C,iBAAiB,mBAAO,CAAC,GAAgB;AACzC,iBAAiB,mBAAO,CAAC,GAAqB;AAC9C;;AAEA;AACA,cAAc;;AAEd;AACA;AACA;AACA,aAAa,aAAa;AAC1B,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;;AAEL;AACA;;AAEA;AACA;;;;;;;;AC1CA,cAAc,GAAG,mBAAO,CAAC,GAAQ;;;;;;;;;ACApB;AACb;AACA;AACA;AACA;AACA;AACA;AACA;AACA,sBAAsB,OAAO,iBAAiB,cAAc;AAC5D,wBAAwB,mBAAO,CAAC,EAAI;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA,yBAAyB;AACzB;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,qCAAqC,IAAI,GAAG,oBAAoB;AAC
hE;AACA;AACA;AACA;AACA,qBAAqB,WAAW,EAAE,yBAAyB;AAC3D;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,mC;;;;;;;;;;;;;;AC7EA,WAAW,mBAAO,CAAC,EAAM;AACzB,UAAU,mBAAO,CAAC,CAAe;AACjC,SAAS,mBAAO,CAAC,GAAI;;AAErB;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,GAAG;;AAEH,eAAe,qCAAqC;AACpD;AACA;AACA;AACA,GAAG;;AAEH;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL,GAAG;;AAEH;AACA;;AAEA,cAAc;;;;;;;;;ACjFD;;AAEb,sBAAsB,OAAO,iBAAiB,cAAc;;AAE5D,+BAA+B,iFAAiF;;AAEhH,6BAA6B,mBAAO,CAAC,GAAQ;AAC7C,2BAA2B,mBAAO,CAAC,GAAM;AACzC,0BAA0B,mBAAO,CAAC,GAAK;AACvC,4BAA4B,mBAAO,CAAC,GAAO;AAC3C,2BAA2B,mBAAO,CAAC,GAAM;;AAEzC;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,kBAAkB,YAAY;AAC9B;AACA;AACA;AACA;AACA,KAAK;AACL;AACA,KAAK;AACL;AACA,KAAK;AACL;AACA,KAAK;AACL;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA,GAAG;AACH;AACA;AACA;AACA;AACA,GAAG;AACH;AACA,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA,6BAA6B,qBAAqB;AAClD;AACA;AACA;AACA;;AAEA;AACA,QAAQ,mBAAmB;AAC3B,QAAQ,mBAAmB;AAC3B,SAAS;AACT,CAAC;;AAED;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,WAAW,mBAAO,CAAC,EAAU;AAC7B,CAAC;;AAED;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,kFAAkF;AAClF;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,EAAE;AACF;AACA;AACA,EAAE,yBAAyB,kCAAkC;AAC7D;AACA;AACA,EAAE;AACF;AACA;AACA,EAAE,mCAAmC;AACrC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,iHAAiH,UAAU,IAAI,YAAY;AAC3I;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA;AACA,EAAE;;AAEF;AACA;AACA,EAAE;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH,EAAE;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI;AACJ;AACA,IAAI;AACJ,GAAG;AACH,EAAE;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,IAAI;AACJ,+EAA+E,WAAW,WAAW,YAAY;AACjH;AACA,GAAG;AACH,EAAE;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH,EAAE;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA;AACA,EAAE;;AAEF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA,QAAQ,mBAAmB;AAC3B,YAAY,mBAAmB;AAC/B,eAAe,mBAAmB;AAClC,QAAQ,mBAAmB;AAC3B,QAAQ,mBAAmB;AAC3B,QAAQ;AACR,CAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,qEAAqE,SAAS;AAC9E;;AAEA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA,oEAAoE,WAAW,SAAS,eAAe;AACvG,IAAI;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI;AACJ;AACA,yEAAyE,WAAW,IAAI,YAAY;AACpG;AACA,GAAG;;AAEH;AACA;AACA;AACA;;AAEA;AACA;AACA,6CAA6C,WAAW,eAAe,YAAY;AACnF;AACA;;AAEA;AACA;AACA,GAAG;;AAEH;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA,IAAI;AACJ;AACA,4EAA4E,WAAW,IAAI,YAAY;AACvG;AACA,GAAG;AACH,EAAE;AACF;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,qBAAqB;AACrB;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAE
A;AACA;AACA,YAAY,EAAE;AACd,YAAY;AACZ;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,EAAE;AACF;AACA,qBAAqB;AACrB,EAAE;AACF;AACA,4CAA4C;AAC5C,EAAE;AACF;AACA;AACA,EAAE;AACF;AACA;AACA,EAAE;AACF;AACA;AACA,EAAE;AACF;AACA;AACA,EAAE;AACF;AACA,8BAA8B,WAAW,mBAAmB;AAC5D,EAAE;AACF;AACA;AACA;AACA,EAAE;AACF;AACA,qBAAqB;AACrB;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAGA;AACA;AACA;AACA,EAAE;AACF;AACA,EAAE;AACF;AACA;AACA,EAAE;AACF;AACA;AACA;AACA;AACA;AACA;AACA;AACA,EAAE;AACF;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAGA;AACA;AACA;AACA,EAAE;AACF;AACA,EAAE;AACF;AACA;AACA;AACA,EAAE;AACF;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA,WAAW,KAAK;AAChB;AACA,yBAAyB,KAAK;AAC9B;AACA;;AAEA;AACA,YAAY,MAAM;AAClB;AACA,yBAAyB,MAAM;AAC/B;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA,qBAAqB;AACrB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAY,KAAK;AACjB;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAY,KAAK;AACjB,aAAa,MAAM;AACnB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAY,KAAK;AACjB,aAAa,MAAM;AACnB;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAY,KAAK;AACjB;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAY,KAAK;AACjB;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;AACA,OAAO,mBAAmB;AAC1B,WAAW,mBAAmB;AAC9B,OAAO,mBAAmB;AAC1B,UAAU,mBAAmB;AAC7B,OAAO,mBAAmB;AAC1B,UAAU,mBAAmB;AAC7B,QAAQ,mBAAmB;AAC3B,UAAU,mBAAmB;AAC7B,WAAW;AACX,CAAC;;AAED;AACA;;AAEA;AACA;AACA;AACA,EAAE;AACF;AACA,EAAE;AACF;AACA,EAAE;AACF;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4BAA4B,kBAAkB;;AAE9C;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;;AAEA;;AAEA;AACA,OAAO,mBAAmB;AAC1B,UAAU,mBAAmB;AAC7B,MAAM,mBAAmB;AACzB,cAAc,mBAAmB;AACjC,cAAc,mBAAmB;AACjC,WAAW,mBAAmB;AAC9B,SAAS;AACT,CAAC;;AAED;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;
AACA;AACA,gDAAgD;AAChD;AACA;AACA;AACA,IAAI;AACJ;AACA,6BAA6B,MAAM;AACnC;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA,GAAG;;AAEH,iEAAiE;;AAEjE;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA,CAAC;;AAED;AACA,UAAU,mBAAmB;AAC7B,OAAO,mBAAmB;AAC1B,WAAW,mBAAmB;AAC9B,YAAY,mBAAmB;AAC/B,SAAS,mBAAmB;AAC5B,UAAU;AACV,CAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA,wBAAwB;AACxB;AACA;AACA;AACA,EAAE;AACF;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,kDAAkD,YAAY;AAC9D;AACA,KAAK;AACL,IAAI;AACJ;;AAEA;AACA,uCAAuC,YAAY,mBAAmB,YAAY;AAClF;AACA,GAAG;;AAEH;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,8DAA8D,YAAY;AAC1E;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,QAAQ;AACR;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,6DAA6D,YAAY;AACzE;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,IAAI;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAM;AACN;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;;AAEH;AACA,EAAE;AACF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA,cAAc,GAAG,OAAO;AACxB,sBAAsB,OAAO,iBAAiB,cAAc;AAC5D,OAAO,WAAW,OAAO;AACzB,OAAO;AACP,OAAO;AACP,OAAO;AACP,OAAO;;;;;;;;;ACzmDM;AACb;AACA,2BAA2B,+DAA+D,gBAAgB,EAAE,EAAE;AAC9G;AACA,mCAAmC,MAAM,6BAA6B,EAAE,YAAY,WAAW,EAAE;AACjG,kCAAkC,MAAM,iCAAiC,EAAE,YAAY,WAAW,EAAE;AACpG,+BAA+B,qFAAqF;AACpH;AACA,KAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,sBAAsB,OAAO,iBAAiB,cAAc;AAC5D,kBAAkB,mBAAO,CAAC,GAAW;AACrC,wBAAwB,mBAAO,CAAC,EAAI;AACpC,0BAA0B,mBAAO,CAAC,GAAM;AACxC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC,aAAa,OAAO,cAAc,OAAO,cAAc;AACxD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,uCAAuC,OAAO;AAC9C;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA,yCAAyC;AACzC;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA,yCAAyC;AACzC,6BAA6B,UAAU,EAAE,eAAe,EAAE,oBAAoB;AAC9E;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,qCAAqC,sCAAsC;AAC3E;AACA,4DAA4D,KAAK;AACjE;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA,0CAA0C,OAAO;AACjD;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,sCAAsC;AACtC;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;
AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,0CAA0C,OAAO;AACjD;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gCAAgC,KAAK;AACrC;AACA,OAAO;AACP,gC;;;;;;;ACzMA,cAAc;;AAEd;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;;;;;;;ACnBA,cAAc;;AAEd;AACA;;AAEA;AACA;AACA,YAAY,IAAI;AAChB,YAAY,QAAQ;AACpB,YAAY;AACZ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA,cAAc;;AAEd;AACA;AACA;AACA,WAAW,SAAS;AACpB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;ACzBA,WAAW,mBAAO,CAAC,GAAM;AACzB,aAAa,mBAAO,CAAC,GAAQ;AAC7B,oBAAoB,mBAAO,CAAC,GAAgB;;AAE5C,cAAc;AACd;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA,WAAW;AACX;;AAEA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL,GAAG;AACH;AACA;AACA;;AAEA;AACA;;;AAGA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;;AAEA;AACA;;AAEA;AACA;AACA;AACA,uBAAuB,WAAW;AAClC;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,GAAG;AACH;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA,GAAG;;AAEH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;;;;;;;;AC/Ma;;AAEb;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,uBAAuB,MAAM,GAAG,sCAAsC;AACtE;AACA,KAAK;AACL,uBAAuB,MAAM,GAAG,YAAY,MAAM,YAAY;AAC9D,KAAK;AACL,mBAAmB,MAAM,GAAG,YAAY;AACxC;AACA,GAAG;AACH,iBAAiB,MAAM,GAAG,iBAAiB;AAC3C;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA;AACA,iBAAiB,KAAK,GAAG,WAAW,GAAG,wBAAwB;AAC/D,GAAG;AACH;AACA,kBAAkB,KAAK,IAAI,KAAK,GAAG,WAAW,GAAG,wBAAwB;AACzE;;AAEA,4BAA4B,cAAc;AAC1C;AACA,CAAC;AACD;AACA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD;;AAEA,cAAc;;;;;;;;ACnHd;AACA,cAAc;;AAEd;AACA;AACA;AACA,WAAW,OAAO;AAClB;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,WAAW,cAAc;AACzB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5BA,aAAa,mBAAO,CAAC,GAAQ;AAC7B;AACA,EAAE,cAAc;AAChB,gBAAgB,cAAc;AAC9B,EAAE,cAAc;AAChB,CAAC;AACD,EAAE,OAAO,GAAG,cAAc,GAAG,mBAAO,CAAC,GAA2B;AAChE,EAAE,OAAO,oBAAoB,OAAO;AACpC,EAAE,OAAO,YAAY,OAAO;AAC5B,EAAE,OAAO,YAAY,mBAAO,CAAC,GAA2B;AACxD,EAAE,OAAO,UAAU,mBAAO,CAAC,GAAyB;AACpD,EAAE,OAAO,aAAa,mBAAO,CAAC,GAA4B;AAC1D,EAAE,OAAO,eAAe,mBAAO,CAAC,GAA8B;AAC9D,EAAE,OAAO,YAAY,mBAAO,CAAC,GAAyC;AACtE,EAAE,OAAO,YAAY,mBAAO,CAAC,GAAoC;AACjE;;;;;;;;;ACfa;;AAEb;AACA;;AAEA;AACA;AACA,mBAAmB,SAAS;AAC5B;AACA;;AAEA;AACA,CAAC;;AAED;AACA;AACA;AACA;;AAEA;AACA;;AAEA,2BAA2B,gBAAgB;AAC3C;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA,mBAAmB,mBAAmB;AACtC;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,SAAS;AACT;AACA;AACA;AACA,SAAS;AACT;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA,aAAa;AACb;AACA;AACA,SAAS;AACT;AACA;
;AAEA;AACA;;AAEA;AACA;AACA,SAAS;AACT;AACA;AACA;AACA,KAAK;AACL;;AAEA;AACA;AACA;AACA;AACA,KAAK;AACL;;AAEA;AACA;AACA;AACA;AACA,iDAAiD,EAAE;AACnD;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;;AAEA;AACA,kDAAkD,EAAE;AACpD;AACA,SAAS;AACT;;AAEA;AACA,mBAAmB,mBAAmB;AACtC;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA,kBAAkB,OAAO,WAAW,aAAa;AACjD;;AAEA,mBAAmB,kBAAkB;AACrC;AACA;;AAEA;AACA,uBAAuB,iBAAiB;AACxC;AACA;AACA;AACA,4BAA4B,sBAAsB;AAClD;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA,cAAc;AACd;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3Oa;;AAEb;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,EAAE;AACF;AACA;;AAEA;AACA;;AAEA,cAAc;AACd;AACA,uDAAuD,cAAc,IAAI,OAAO;AAChF;;AAEA,0BAA0B,YAAY;AACtC;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;;;;;;;AC3EA,iC;;;;;;;ACAA,mC;;;;;;;ACAA,iC;;;;;;;ACAA,iC;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEa;;AAEb;;AAEA,aAAa,mBAAO,CAAC,GAAa;AAClC;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4BAA4B;AAC5B;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,6BAA6B,sCAAsC,sCAAsC;AACzG;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,2BAA2B;AAC3B;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA,C;;;;;;;ACvSA;AACA,aAAa,mBAAO,CAAC,GAAM;AAC3B;AACA;AACA,EAAE,cAAc;AAChB,CAAC;AACD;AACA,EAAE,cAAc,GAAG,mBAAO,CAAC,GAAuB;AAClD;;;;;;;;ACRA,WAAW,mBAAO,CAAC,GAAI;AACvB,aAAa,mBAAO,CAAC,GAAM;;AAE3B,OAAO,iBAAiB;;AAExB;AACA;AACA;AACA,WAAW,OAAO;AAClB;AACA,YAAY,QAAQ;AACpB;AACA;;AAEA;AACA;AACA;AACA,WAAW,OAAO;AAClB;AACA,YAAY,QAAQ;AACpB;AACA;;AAEA;AACA;AACA;AACA;AACA,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB;;AAEA;AACA;AACA;AACA,WAAW,QAAQ;AACnB;AACA,YAAY,EAAE;AACd;AACA,iBAAiB,0CAA0C;;AAE3D;AACA;AACA;AACA;AACA,cAAc,WAAW;AACzB,cAAc,WAAW;AACzB,cAAc,OAAO;AACrB,cAAc,OAAO;AACrB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW,gBAAgB;AAC3B,WAAW,QAAQ;AACnB;AACA,YAAY,oBAAoB;AAChC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;;AAEH;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,WAAW,OAAO;AAClB,WAAW,OAAO;AAClB,WAAW,OAAO;AAClB;AACA,YAAY,QAAQ;AACpB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,WAAW,OAAO;AAClB,WAAW,OAAO;AAClB,WAAW,OAAO;AAClB;AACA,aAAa,QAAQ;AACrB;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW,gBAAgB;AAC3B,WAAW,QAAQ;AACnB;AACA,aAAa,sBAAsB;AACnC;AACA;AACA;AACA;;AAEA,cAAc;AACd;AACA;AACA;AACA;;;;;;;;AClLA,cAAc;AACd;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;;;;;;;AC5BA,OAAO,eAAe,GAAG,mBAAO,CAAC,GAAQ;AACzC;;AAEA,aAAa,mBAAO,CAAC,GAAW;;AAEhC;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA,wBAAwB,iFAAiF,KAAK;AAC9G;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,wBAAwB,iFAAiF,KAAK;AAC9G;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA,mBAAmB,oBAAoB;AACvC;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA,OAAO;AACP;AACA;AACA;AACA,KAAK;;AAEL;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA
;AACA;AACA,KAAK;AACL;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA,+BAA+B,cAAc;AAC7C,0BAA0B,eAAe;AACzC;AACA;;AAEA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA,SAAS;AACT,OAAO;AACP;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,2BAA2B,mCAAmC;AAC9D;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA,+BAA+B;AAC/B;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA,cAAc;AACd;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACt1BA,WAAW,mBAAO,CAAC,GAAM;AACzB,eAAe,mBAAO,CAAC,GAAU;AACjC,gBAAgB,mBAAO,CAAC,GAAW;;AAEnC;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,KAAK;;AAEL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;;AAEL;AACA;AACA;AACA;;AAEA;AACA;;AAEA,cAAc,WAAW,iCAAiC;AAC1D,cAAc,eAAe,qEAAqE;AAClG,cAAc;;;;;;;;AC3Dd,+B;;;;;;;ACAA,YAAY,mBAAO,CAAC,GAAY;;AAEhC;AACA,cAAc;;AAEd;AACA;AACA;AACA;AACA,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB;AACA;AACA;AACA;;AAEA;AACA,oBAAoB,gBAAgB,EAAE;;AAEtC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA;;;;;;;;;ACjCa;;AAEb,YAAY,mBAAO,CAAC,GAAS;;AAE7B;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,gCAAgC;AAChC;AACA,KAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA,wCAAwC,gCAAgC;;AAExE;AACA,uCAAuC;;AAEvC;AACA;AACA;AACA;AACA;AACA,uBAAuB;AACvB;;AAEA;AACA;AACA,mBAAmB,kBAAkB;AACrC;AACA;AACA;AACA,iBAAiB;AACjB;AACA;AACA;AACA,iCAAiC;AACjC;AACA;AACA;;AAEA,eAAe,kBAAkB;AACjC;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA,SAAS;AACT;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,SAAS;AACT;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA,kCAAkC,QAAQ;AAC1C;AACA;;AAEA;AACA;AACA,SAAS;AACT;AACA;AACA;AACA;AACA,uBAAuB;AACvB,aAAa;AACb;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,aAAa;AACb;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;;AAEA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,cAAc;AACd;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA,mBAAmB,iBAAiB;AACpC;AACA;AACA;AACA;;AAEA;AACA;;;;;;;;ACvPA,iC;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;;AAEY;;AAEZ;AACA;AACA;AACA;;AAEA,SAAS,mBAAO,CAAC,GAAS;AAC1B,cAAc,mBAAO,CAAC,GAAM;;AAE5B;AACA;AACA;AACA;;AAEA,mCAAmC,SAAS;AAC5C;;AAEA;AACA;AACA;AACA;;AAEA,OAAO;AACP,OAAO,aAAa;AACpB,OAAO;AACP,OAAO;AACP,OAAO;AACP,OAAO;AACP,OAAO;;AAEP;AACA,aAAa,OAAO,aAAa,OAAO;;AAExC;AACA;AACA;AACA,WAAW,OAAO;AAClB,YAAY;AACZ;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,WAAW,OAAO;AAClB,YAAY;AACZ;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA,MAAM,OAAO;AACb;;AAEA;AACA;AACA;;AAEA;AACA;AACA,kBAAkB,OAAO;AACzB,2BAA2B;AAC3B;;AAEA;AACA;;AAEA;AACA;AACA;AACA,WAAW,OAAO;AAClB,YAAY;AACZ;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA,sBAAsB,OAAO;;AAE7B;AACA;A
ACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,WAAW,OAAO;AAClB,YAAY;AACZ;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA,SAAS,OAAO;AAChB;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA,mBAAmB,iBAAiB;AACpC;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;;;;;;;;AC3LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA,cAAc;AACd;AACA;AACA;AACA;AACA;;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACa;AACb;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;;AAGA,cAAc;;AAEd,eAAe,mBAAO,CAAC,GAAoB;;AAE3C,eAAe,mBAAO,CAAC,GAAoB;;AAE3C,mBAAO,CAAC,GAAU;;AAElB;AACA;AACA;;AAEA,iBAAiB,iBAAiB;AAClC;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC,EAAE;;AAEH;AACA;AACA,wCAAwC;AACxC;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;;;AAGA;AACA;AACA;AACA,CAAC,E;;;;;;;AC1ID,gC;;;;;;;ACAA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA,cAAc,GAAG,mBAAO,CAAC,GAAW;;;;;;;;;ACVpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACa;;AAEb,cAAc;;AAEd,gBAAgB,mBAAO,CAAC,GAAqB;;AAE7C,mBAAO,CAAC,GAAU;;AAElB;AACA;AACA;AACA;;AAEA;AACA;AACA,E;;;;;;;ACtCA,iBAAiB,mBAAO,CAAC,GAAkB;AAC3C,iBAAiB,mBAAO,CAAC,GAAgB;AACzC,iBAAiB,mBAAO,CAAC,GAAqB;AAC9C;;AAEA;AACA,cAAc;AACd;AACA,cAAc;AACd,cAAc;;AAEd;AACA;AACA;AACA,aAAa,aAAa;AAC1B,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB,aAAa,SAAS;AACtB;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,GAAG;;AAEH;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,aAAa,MAAM;AACnB,aAAa,MAAM;AACnB,aAAa,OAAO;AACpB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,aAAa,MAAM;AACnB,aAAa,MAAM;AACnB,aAAa,OAAO;AACpB;AACA;AACA;AACA;AACA;;;;;;;;;AC1Ea;;AAEb,0CAA0C,gCAAgC,oCAAoC,oDAAoD,8DAA8D,gEAAgE,EAAE,EAAE,gCAAgC,EAAE,aAAa;;AAEnV,gCAAgC,gBAAgB,sBAAsB,OAAO,uDAAuD,aAAa,uDAAuD,2CAA2C,EAAE,EAAE,EAAE,6CAA6C,2EAA2E,EAAE,OAAO,iDAAiD,kFAAkF,EAAE,EAAE,EAAE,EAAE,eAAe;;AAEphB,2CAA2C,kBAAkB,kCAAkC,qEAAqE,EAAE,EAAE,OAAO,kBAAkB,EAAE,YAAY;;AAE/M,iDAAiD,0CAA0C,0DAA0D,EAAE;;AAEvJ,2CAA2C,gBAAgB,kBAAkB,OAAO,2BAA2B,wDAAwD,gCAAgC,uDAAuD,2DAA2D,EAAE;;AAE3T,6DAA6D,sEAAsE,8DAA8D,oBAAoB;;AAErN,eAAe,mBAAO,CAAC,GAAQ;AAC/B;;AAEA,gBAAgB,mBAAO,CAAC,GAAM;AAC9B;;AAEA;;AAEA;AACA;AACA;;AAEA,cAAc;AACd;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,kDAAkD;AAClD;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA,0DAA0D;AAC1D;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA,KAAK;;AAEL,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,OAAO;AACP;AACA;AACA,OAAO;AACP;AACA;AACA;;AAEA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA,KAAK;;AAEL,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,0CAA0C;AAC1C;;AAEA;AACA;AACA;AACA,2CAA2C;AAC3C,WAAW;AACX;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA,KAAK;;AAEL,GAAG;AACH;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,2CAA2C;AAC3C,WAAW;AACX
;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA,KAAK;;AAEL,GAAG;AACH;AACA;AACA,2CAA2C;AAC3C;AACA;AACA;AACA;AACA,OAAO;AACP;AACA,GAAG;;AAEH;AACA,CAAC,G;;;;;;;;ACjNY;;AAEb,YAAY,mBAAO,CAAC,GAAS;AAC7B,cAAc,mBAAO,CAAC,EAAW;AACjC;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;AACL;AACA,KAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA,mBAAmB,oBAAoB;AACvC;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,cAAc;AACd;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA,KAAK;AACL;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA,mBAAmB,oBAAoB;AACvC;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA,2CAA2C;AAC3C;AACA,SAAS;AACT;AACA;AACA;AACA;;AAEA;AACA;;;;;;;;;ACrRA;AACA;AACA;;AAEA,cAAc,GAAG,mBAAO,CAAC,GAAM;;;;;;;;;ACL/B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,0CAA0C,YAAY;AACtD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACa;;AAEb,cAAc;;AAEd,qBAAqB,mBAAO,CAAC,GAAW;AACxC;AACA;AACA;AACA;;AAEA,aAAa,mBAAO,CAAC,GAAkB;;AAEvC,mBAAO,CAAC,GAAU;;AAElB;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI;;AAEJ,0CAA0C;AAC1C;AACA;;AAEA;;AAEA;AACA;AACA;AACA,GAAG;;;AAGH;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA,EAAE;AACF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAGA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,EAAE;AACF;AACA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;;AAEA;AACA;AACA;AACA,sBAAsB;AACtB;AACA;;AAEA;AACA;AACA;AACA,C;;;;;;;ACxMA,qBAAqB,mBAAO,CAAC,GAAiB;AAC9C,WAAW,mBAAO,CAAC,GAAM;AACzB,WAAW,mBAAO,CAAC,GAAM;AACzB,WAAW,mBAAO,CAAC,GAAM;AACzB,YAAY,mBAAO,CAAC,GAAO;AAC3B,eAAe,mBAAO,CAAC,GAAK;AAC5B,SAAS,mBAAO,CAAC,GAAI;AACrB,WAAW,mBAAO,CAAC,GAAY;AAC/B,eAAe,mBAAO,CAAC,GAAU;AACjC,eAAe,mBAAO,CAAC,EAAe;;AAEtC;AACA,cAAc;;AAEd;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,WAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;;AAEA;AACA;AACA,eAAe;AACf;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,GAAG;AACH;AACA,GAAG;AACH;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,KAAK;AACL;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,OAAO;AACP;;AAEA;AACA,GAAG;AA
CH;;AAEA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA,KAAK;AACL;;AAEA;AACA,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA,8CAA8C;AAC9C;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;AACA;AACA,GAAG;AACH;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,GAAG;AACH;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA,yCAAyC;AACzC;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA,6CAA6C,SAAS;AACtD;;AAEA;AACA;AACA;AACA,OAAO;AACP;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,iBAAiB,QAAQ;AACzB;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,KAAK;;AAEL;AACA,GAAG;AACH;;AAEA;AACA;AACA;AACA,kBAAkB;AAClB;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK;;AAEL;AACA,GAAG;;AAEH;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,GAAG;AACH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA,GAAG;;AAEH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;;;;;;;AC7eA,YAAY,mBAAO,CAAC,GAAY;AAChC,YAAY,mBAAO,CAAC,GAAY;AAChC;;AAEA;AACA,cAAc;;AAEd;AACA;AACA;AACA;AACA,WAAW,SAAS;AACpB;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,KAAK;AACL;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA,cAAc","file":"index.js","sourcesContent":[" \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t__webpack_require__.ab = __dirname + \"/\";\n\n \t// the startup function\n \tfunction startup() {\n \t\t// Load entry module and return exports\n \t\treturn __webpack_require__(104);\n \t};\n\n \t// run startup\n \treturn startup();\n","var once = require('once');\n\nvar noop = function() {};\n\nvar isRequest = function(stream) {\n\treturn stream.setHeader && typeof stream.abort === 'function';\n};\n\nvar isChildProcess = function(stream) {\n\treturn stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3\n};\n\nvar eos = function(stream, opts, callback) {\n\tif (typeof opts === 'function') return eos(stream, null, opts);\n\tif (!opts) opts = {};\n\n\tcallback = once(callback || noop);\n\n\tvar ws = stream._writableState;\n\tvar rs = stream._readableState;\n\tvar readable = opts.readable || (opts.readable !== false && stream.readable);\n\tvar writable = 
opts.writable || (opts.writable !== false && stream.writable);\n\tvar cancelled = false;\n\n\tvar onlegacyfinish = function() {\n\t\tif (!stream.writable) onfinish();\n\t};\n\n\tvar onfinish = function() {\n\t\twritable = false;\n\t\tif (!readable) callback.call(stream);\n\t};\n\n\tvar onend = function() {\n\t\treadable = false;\n\t\tif (!writable) callback.call(stream);\n\t};\n\n\tvar onexit = function(exitCode) {\n\t\tcallback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);\n\t};\n\n\tvar onerror = function(err) {\n\t\tcallback.call(stream, err);\n\t};\n\n\tvar onclose = function() {\n\t\tprocess.nextTick(onclosenexttick);\n\t};\n\n\tvar onclosenexttick = function() {\n\t\tif (cancelled) return;\n\t\tif (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));\n\t\tif (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));\n\t};\n\n\tvar onrequest = function() {\n\t\tstream.req.on('finish', onfinish);\n\t};\n\n\tif (isRequest(stream)) {\n\t\tstream.on('complete', onfinish);\n\t\tstream.on('abort', onclose);\n\t\tif (stream.req) onrequest();\n\t\telse stream.on('request', onrequest);\n\t} else if (writable && !ws) { // legacy streams\n\t\tstream.on('end', onlegacyfinish);\n\t\tstream.on('close', onlegacyfinish);\n\t}\n\n\tif (isChildProcess(stream)) stream.on('exit', onexit);\n\n\tstream.on('end', onend);\n\tstream.on('finish', onfinish);\n\tif (opts.error !== false) stream.on('error', onerror);\n\tstream.on('close', onclose);\n\n\treturn function() {\n\t\tcancelled = true;\n\t\tstream.removeListener('complete', onfinish);\n\t\tstream.removeListener('abort', onclose);\n\t\tstream.removeListener('request', onrequest);\n\t\tif (stream.req) stream.req.removeListener('finish', onfinish);\n\t\tstream.removeListener('end', onlegacyfinish);\n\t\tstream.removeListener('close', onlegacyfinish);\n\t\tstream.removeListener('finish', onfinish);\n\t\tstream.removeListener('exit', onexit);\n\t\tstream.removeListener('end', onend);\n\t\tstream.removeListener('error', onerror);\n\t\tstream.removeListener('close', onclose);\n\t};\n};\n\nmodule.exports = eos;\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n","'use strict';\n\nvar replace = String.prototype.replace;\nvar percentTwenties = /%20/g;\n\nvar util = require('./utils');\n\nvar Format = {\n RFC1738: 'RFC1738',\n RFC3986: 'RFC3986'\n};\n\nmodule.exports = util.assign(\n {\n 'default': Format.RFC3986,\n formatters: {\n RFC1738: function (value) {\n return replace.call(value, percentTwenties, '+');\n },\n RFC3986: function (value) {\n return String(value);\n }\n }\n },\n 
Format\n);\n",null,"'use strict';\n\nvar _Object$setPrototypeO;\n\nfunction _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\n\nvar finished = require('./end-of-stream');\n\nvar kLastResolve = Symbol('lastResolve');\nvar kLastReject = Symbol('lastReject');\nvar kError = Symbol('error');\nvar kEnded = Symbol('ended');\nvar kLastPromise = Symbol('lastPromise');\nvar kHandlePromise = Symbol('handlePromise');\nvar kStream = Symbol('stream');\n\nfunction createIterResult(value, done) {\n return {\n value: value,\n done: done\n };\n}\n\nfunction readAndResolve(iter) {\n var resolve = iter[kLastResolve];\n\n if (resolve !== null) {\n var data = iter[kStream].read(); // we defer if data is null\n // we can be expecting either 'end' or\n // 'error'\n\n if (data !== null) {\n iter[kLastPromise] = null;\n iter[kLastResolve] = null;\n iter[kLastReject] = null;\n resolve(createIterResult(data, false));\n }\n }\n}\n\nfunction onReadable(iter) {\n // we wait for the next tick, because it might\n // emit an error with process.nextTick\n process.nextTick(readAndResolve, iter);\n}\n\nfunction wrapForNext(lastPromise, iter) {\n return function (resolve, reject) {\n lastPromise.then(function () {\n if (iter[kEnded]) {\n resolve(createIterResult(undefined, true));\n return;\n }\n\n iter[kHandlePromise](resolve, reject);\n }, reject);\n };\n}\n\nvar AsyncIteratorPrototype = Object.getPrototypeOf(function () {});\nvar ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {\n get stream() {\n return this[kStream];\n },\n\n next: function next() {\n var _this = this;\n\n // if we have detected an error in the meanwhile\n // reject straight away\n var error = this[kError];\n\n if (error !== null) {\n return Promise.reject(error);\n }\n\n if (this[kEnded]) {\n return Promise.resolve(createIterResult(undefined, true));\n }\n\n if (this[kStream].destroyed) {\n // We need to defer via nextTick because if .destroy(err) is\n // called, the error will be emitted via nextTick, and\n // we cannot guarantee that there is no error lingering around\n // waiting to be emitted.\n return new Promise(function (resolve, reject) {\n process.nextTick(function () {\n if (_this[kError]) {\n reject(_this[kError]);\n } else {\n resolve(createIterResult(undefined, true));\n }\n });\n });\n } // if we have multiple next() calls\n // we will wait for the previous Promise to finish\n // this logic is optimized to support for await loops,\n // where next() is only called once at a time\n\n\n var lastPromise = this[kLastPromise];\n var promise;\n\n if (lastPromise) {\n promise = new Promise(wrapForNext(lastPromise, this));\n } else {\n // fast path needed to support multiple this.push()\n // without triggering the next() queue\n var data = this[kStream].read();\n\n if (data !== null) {\n return Promise.resolve(createIterResult(data, false));\n }\n\n promise = new Promise(this[kHandlePromise]);\n }\n\n this[kLastPromise] = promise;\n return promise;\n }\n}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {\n return this;\n}), _defineProperty(_Object$setPrototypeO, \"return\", function _return() {\n var _this2 = this;\n\n // destroy(err, cb) is a private API\n // we can guarantee we have that here, because we control the\n // Readable class this is attached to\n return new Promise(function (resolve, reject) {\n _this2[kStream].destroy(null, 
function (err) {\n if (err) {\n reject(err);\n return;\n }\n\n resolve(createIterResult(undefined, true));\n });\n });\n}), _Object$setPrototypeO), AsyncIteratorPrototype);\n\nvar createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {\n var _Object$create;\n\n var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {\n value: stream,\n writable: true\n }), _defineProperty(_Object$create, kLastResolve, {\n value: null,\n writable: true\n }), _defineProperty(_Object$create, kLastReject, {\n value: null,\n writable: true\n }), _defineProperty(_Object$create, kError, {\n value: null,\n writable: true\n }), _defineProperty(_Object$create, kEnded, {\n value: stream._readableState.endEmitted,\n writable: true\n }), _defineProperty(_Object$create, kHandlePromise, {\n value: function value(resolve, reject) {\n var data = iterator[kStream].read();\n\n if (data) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(data, false));\n } else {\n iterator[kLastResolve] = resolve;\n iterator[kLastReject] = reject;\n }\n },\n writable: true\n }), _Object$create));\n iterator[kLastPromise] = null;\n finished(stream, function (err) {\n if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {\n var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise\n // returned by next() and store the error\n\n if (reject !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n reject(err);\n }\n\n iterator[kError] = err;\n return;\n }\n\n var resolve = iterator[kLastResolve];\n\n if (resolve !== null) {\n iterator[kLastPromise] = null;\n iterator[kLastResolve] = null;\n iterator[kLastReject] = null;\n resolve(createIterResult(undefined, true));\n }\n\n iterator[kEnded] = true;\n });\n stream.on('readable', onReadable.bind(null, iterator));\n return iterator;\n};\n\nmodule.exports = createReadableStreamAsyncIterator;","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","// populates missing values\nmodule.exports = function(dst, src) {\n\n Object.keys(src).forEach(function(prop)\n {\n dst[prop] = dst[prop] || src[prop];\n });\n\n return dst;\n};\n","module.exports = require(\"os\");","var serialOrdered = require('./serialOrdered.js');\n\n// Public API\nmodule.exports = serial;\n\n/**\n * Runs iterator over provided array elements in series\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} callback - invoked when 
all elements processed\n * @returns {function} - jobs terminator\n */\nfunction serial(list, iterator, callback)\n{\n return serialOrdered(list, iterator, null, callback);\n}\n","const crypto = require('crypto')\nconst util = require('util')\nconst fs = require('fs')\n\nconst ppump = util.promisify(require('pump'))\n\n/**\n * neocitiesLocalDiff returns an array of files to delete and update and some useful stats.\n * @param {Array} neocitiesFiles Array of files returned from the neocities list api.\n * @param {Array} localListing Array of files returned by a full data async-folder-walker run.\n * @return {Promise} Object of filesToUpload, filesToDelete and filesSkipped.\n */\nasync function neocitiesLocalDiff (neocitiesFiles, localListing) {\n const localIndex = {}\n const ncIndex = {}\n\n const neoCitiesFiltered = neocitiesFiles.filter(f => !f.is_directory)\n neoCitiesFiltered.forEach(f => { ncIndex[f.path] = f }) // index\n const ncFiles = new Set(neoCitiesFiltered.map(f => f.path)) // shape\n\n const localListingFiltered = localListing.filter(f => !f.stat.isDirectory()) // files only\n localListingFiltered.forEach(f => { localIndex[forceUnixRelname(f.relname)] = f }) // index\n const localFiles = new Set(localListingFiltered.map(f => forceUnixRelname(f.relname))) // shape\n\n const filesToAdd = difference(localFiles, ncFiles)\n const filesToDelete = difference(ncFiles, localFiles)\n\n const maybeUpdate = intersection(localFiles, ncFiles)\n const skipped = new Set()\n\n for (const p of maybeUpdate) {\n const local = localIndex[p]\n const remote = ncIndex[p]\n\n if (local.stat.size !== remote.size) { filesToAdd.add(p); continue }\n\n const localSha1 = await sha1FromPath(local.filepath)\n if (localSha1 !== remote.sha1_hash) { filesToAdd.add(p); continue }\n\n skipped.add(p)\n }\n\n return {\n filesToUpload: Array.from(filesToAdd).map(p => ({\n name: forceUnixRelname(localIndex[p].relname),\n path: localIndex[p].filepath\n })),\n filesToDelete: Array.from(filesToDelete).map(p => ncIndex[p].path),\n filesSkipped: Array.from(skipped).map(p => localIndex[p])\n }\n}\n\nmodule.exports = {\n neocitiesLocalDiff\n}\n\n/**\n * sha1FromPath returns a sha1 hex from a path\n * @param {String} p string of the path of the file to hash\n * @return {Promise} the hex representation of the sha1\n */\nasync function sha1FromPath (p) {\n const rs = fs.createReadStream(p)\n const hash = crypto.createHash('sha1')\n\n await ppump(rs, hash)\n\n return hash.digest('hex')\n}\n\n// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set#Implementing_basic_set_operations\n\n/**\n * difference returnss the difference betwen setA and setB.\n * @param {Set} setA LHS set\n * @param {Set} setB RHS set\n * @return {Set} The difference Set\n */\nfunction difference (setA, setB) {\n const _difference = new Set(setA)\n for (const elem of setB) {\n _difference.delete(elem)\n }\n return _difference\n}\n\n/**\n * intersection returns the interesction between setA and setB.\n * @param {Set} setA setA LHS set\n * @param {Set} setB setB RHS set\n * @return {Set} The intersection set between setA and setB.\n */\nfunction intersection (setA, setB) {\n const _intersection = new Set()\n for (const elem of setB) {\n if (setA.has(elem)) {\n _intersection.add(elem)\n }\n }\n return _intersection\n}\n\n/**\n * forceUnixRelname forces a OS dependent path to a unix style path.\n * @param {String} relname String path to convert to unix style.\n * @return {String} The unix variant of the path\n */\nfunction 
forceUnixRelname (relname) {\n return relname.split(relname.sep).join('/')\n}\n\n/**\n * Example of neocitiesFiles\n */\n// [\n// {\n// path: 'img',\n// is_directory: true,\n// updated_at: 'Thu, 21 Nov 2019 04:06:17 -0000'\n// },\n// {\n// path: 'index.html',\n// is_directory: false,\n// size: 1094,\n// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',\n// sha1_hash: '7f15617e87d83218223662340f4052d9bb9d096d'\n// },\n// {\n// path: 'neocities.png',\n// is_directory: false,\n// size: 13232,\n// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',\n// sha1_hash: 'fd2ee41b1922a39a716cacb88c323d613b0955e4'\n// },\n// {\n// path: 'not_found.html',\n// is_directory: false,\n// size: 347,\n// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',\n// sha1_hash: 'd7f004e9d3b2eaaa8827f741356f1122dc9eb030'\n// },\n// {\n// path: 'style.css',\n// is_directory: false,\n// size: 298,\n// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',\n// sha1_hash: 'e516457acdb0d00710ab62cc257109ef67209ce8'\n// }\n// ]\n\n/**\n * Example of localListing\n */\n// [{\n// root: '/Users/bret/repos/async-folder-walker/fixtures',\n// filepath: '/Users/bret/repos/async-folder-walker/fixtures/sub-folder/sub-sub-folder',\n// stat: Stats {\n// dev: 16777220,\n// mode: 16877,\n// nlink: 3,\n// uid: 501,\n// gid: 20,\n// rdev: 0,\n// blksize: 4096,\n// ino: 30244023,\n// size: 96,\n// blocks: 0,\n// atimeMs: 1574381262779.8396,\n// mtimeMs: 1574380914743.5474,\n// ctimeMs: 1574380914743.5474,\n// birthtimeMs: 1574380905232.5996,\n// atime: 2019-11-22T00:07:42.780Z,\n// mtime: 2019-11-22T00:01:54.744Z,\n// ctime: 2019-11-22T00:01:54.744Z,\n// birthtime: 2019-11-22T00:01:45.233Z\n// },\n// relname: 'sub-folder/sub-sub-folder',\n// basename: 'sub-sub-folder'\n// }]\n","const core = require('@actions/core')\n// const github = require('@actions/github')\nconst Neocities = require('async-neocities')\nconst path = require('path')\nconst ms = require('ms')\nconst assert = require('nanoassert')\nconst fsp = require('fs').promises\n\nasync function doDeploy () {\n const token = core.getInput('api_token')\n const distDir = path.join(process.cwd(), core.getInput('dist_dir'))\n const cleanup = JSON.parse(core.getInput('cleanup'))\n\n assert(typeof cleanup === 'boolean', 'Cleanup input must be a boolean \"true\" or \"false\"')\n const stat = await fsp.stat(distDir)\n assert(stat.isDirectory(), 'dist_dir must be a directory that exists')\n\n const client = new Neocities(token)\n\n const stats = await client.deploy(distDir, {\n cleanup,\n statsCb: Neocities.statsHandler()\n })\n\n console.log(`Deployed to Neocities in ${ms(stats.time)}:`)\n console.log(` Uploaded ${stats.filesToUpload.length} files`)\n console.log(` ${cleanup ? 
'Deleted' : 'Orphaned'} ${stats.filesToDelete.length} files`)\n console.log(` Skipped ${stats.filesSkipped.length} files`)\n}\n\ndoDeploy().catch(err => {\n console.error(err)\n core.setFailed(err.message)\n})\n","const { Writable, Transform } = require('streamx')\nconst pump = require('pump')\nconst pumpify = require('pumpify')\n\nfunction getStreamLength (readable) {\n let length = 0\n\n const dummyLoad = new Writable({\n write (data, cb) {\n length += data.length\n cb(null)\n }\n })\n\n return new Promise((resolve, reject) => {\n pump(readable, dummyLoad, (err) => {\n if (err) return reject(err)\n resolve(length)\n })\n })\n}\n\nfunction meterStream (readable, statsCb) {\n let bytesRead = 0\n const meter = new Transform({\n transform (data, cb) {\n bytesRead += data.length\n statsCb(bytesRead)\n cb(null, data)\n }\n })\n return pumpify(readable, meter)\n}\n\nmodule.exports = {\n getStreamLength,\n meterStream\n}\n","const FixedFIFO = require('./fixed-size')\n\nmodule.exports = class FastFIFO {\n constructor (hwm) {\n this.hwm = hwm || 16\n this.head = new FixedFIFO(this.hwm)\n this.tail = this.head\n }\n\n push (val) {\n if (!this.head.push(val)) {\n const prev = this.head\n this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length)\n this.head.push(val)\n }\n }\n\n shift () {\n const val = this.tail.shift()\n if (val === undefined && this.tail.next) {\n const next = this.tail.next\n this.tail.next = null\n this.tail = next\n return this.tail.shift()\n }\n return val\n }\n\n isEmpty () {\n return this.head.isEmpty()\n }\n}\n","// API\nmodule.exports = state;\n\n/**\n * Creates initial state object\n * for iteration over list\n *\n * @param {array|object} list - list to iterate over\n * @param {function|null} sortMethod - function to use for keys sort,\n * or `null` to keep them as is\n * @returns {object} - initial state object\n */\nfunction state(list, sortMethod)\n{\n var isNamedList = !Array.isArray(list)\n , initState =\n {\n index : 0,\n keyedList: isNamedList || sortMethod ? Object.keys(list) : null,\n jobs : {},\n results : isNamedList ? {} : [],\n size : isNamedList ? Object.keys(list).length : list.length\n }\n ;\n\n if (sortMethod)\n {\n // sort array keys based on it's values\n // sort object's keys just on own merit\n initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b)\n {\n return sortMethod(list[a], list[b]);\n });\n }\n\n return initState;\n}\n","/* eslint-disable node/no-deprecated-api */\nvar buffer = require('buffer')\nvar Buffer = buffer.Buffer\n\n// alternative to using Object.keys for old browsers\nfunction copyProps (src, dst) {\n for (var key in src) {\n dst[key] = src[key]\n }\n}\nif (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {\n module.exports = buffer\n} else {\n // Copy properties from require('buffer')\n copyProps(buffer, exports)\n exports.Buffer = SafeBuffer\n}\n\nfunction SafeBuffer (arg, encodingOrOffset, length) {\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.prototype = Object.create(Buffer.prototype)\n\n// Copy static methods from Buffer\ncopyProps(Buffer, SafeBuffer)\n\nSafeBuffer.from = function (arg, encodingOrOffset, length) {\n if (typeof arg === 'number') {\n throw new TypeError('Argument must not be a number')\n }\n return Buffer(arg, encodingOrOffset, length)\n}\n\nSafeBuffer.alloc = function (size, fill, encoding) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n var buf = Buffer(size)\n if (fill !== undefined) {\n if (typeof encoding === 'string') {\n buf.fill(fill, encoding)\n } else {\n buf.fill(fill)\n }\n } else {\n buf.fill(0)\n }\n return buf\n}\n\nSafeBuffer.allocUnsafe = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return Buffer(size)\n}\n\nSafeBuffer.allocUnsafeSlow = function (size) {\n if (typeof size !== 'number') {\n throw new TypeError('Argument must be a number')\n }\n return buffer.SlowBuffer(size)\n}\n","var Stream = require('stream').Stream;\nvar util = require('util');\n\nmodule.exports = DelayedStream;\nfunction DelayedStream() {\n this.source = null;\n this.dataSize = 0;\n this.maxDataSize = 1024 * 1024;\n this.pauseStream = true;\n\n this._maxDataSizeExceeded = false;\n this._released = false;\n this._bufferedEvents = [];\n}\nutil.inherits(DelayedStream, Stream);\n\nDelayedStream.create = function(source, options) {\n var delayedStream = new this();\n\n options = options || {};\n for (var option in options) {\n delayedStream[option] = options[option];\n }\n\n delayedStream.source = source;\n\n var realEmit = source.emit;\n source.emit = function() {\n delayedStream._handleEmit(arguments);\n return realEmit.apply(source, arguments);\n };\n\n source.on('error', function() {});\n if (delayedStream.pauseStream) {\n source.pause();\n }\n\n return delayedStream;\n};\n\nObject.defineProperty(DelayedStream.prototype, 'readable', {\n configurable: true,\n enumerable: true,\n get: function() {\n return this.source.readable;\n }\n});\n\nDelayedStream.prototype.setEncoding = function() {\n return this.source.setEncoding.apply(this.source, arguments);\n};\n\nDelayedStream.prototype.resume = function() {\n if (!this._released) {\n this.release();\n }\n\n this.source.resume();\n};\n\nDelayedStream.prototype.pause = function() {\n this.source.pause();\n};\n\nDelayedStream.prototype.release = function() {\n this._released = true;\n\n this._bufferedEvents.forEach(function(args) {\n this.emit.apply(this, args);\n }.bind(this));\n this._bufferedEvents = [];\n};\n\nDelayedStream.prototype.pipe = function() {\n var r = Stream.prototype.pipe.apply(this, arguments);\n this.resume();\n return r;\n};\n\nDelayedStream.prototype._handleEmit = function(args) {\n if (this._released) {\n this.emit.apply(this, args);\n return;\n }\n\n if (args[0] 
=== 'data') {\n this.dataSize += args[1].length;\n this._checkIfMaxDataSizeExceeded();\n }\n\n this._bufferedEvents.push(args);\n};\n\nDelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {\n if (this._maxDataSizeExceeded) {\n return;\n }\n\n if (this.dataSize <= this.maxDataSize) {\n return;\n }\n\n this._maxDataSizeExceeded = true;\n var message =\n 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'\n this.emit('error', new Error(message));\n};\n","var async = require('./async.js')\n , abort = require('./abort.js')\n ;\n\n// API\nmodule.exports = iterate;\n\n/**\n * Iterates over each job object\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {object} state - current job status\n * @param {function} callback - invoked when all elements processed\n */\nfunction iterate(list, iterator, state, callback)\n{\n // store current index\n var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;\n\n state.jobs[key] = runJob(iterator, key, list[key], function(error, output)\n {\n // don't repeat yourself\n // skip secondary callbacks\n if (!(key in state.jobs))\n {\n return;\n }\n\n // clean up jobs\n delete state.jobs[key];\n\n if (error)\n {\n // don't process rest of the results\n // stop still active jobs\n // and reset the list\n abort(state);\n }\n else\n {\n state.results[key] = output;\n }\n\n // return salvaged results\n callback(error, state.results);\n });\n}\n\n/**\n * Runs iterator over provided job element\n *\n * @param {function} iterator - iterator to invoke\n * @param {string|number} key - key/index of the element in the list of jobs\n * @param {mixed} item - job description\n * @param {function} callback - invoked after iterator is done with the job\n * @returns {function|mixed} - job abort function or something else\n */\nfunction runJob(iterator, key, item, callback)\n{\n var aborter;\n\n // allow shortcut if iterator expects only two arguments\n if (iterator.length == 2)\n {\n aborter = iterator(item, async(callback));\n }\n // otherwise go with full three arguments\n else\n {\n aborter = iterator(item, key, async(callback));\n }\n\n return aborter;\n}\n","'use strict';\n\nfunction asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }\n\nfunction _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"next\", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, \"throw\", err); } _next(undefined); }); }; }\n\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }\n\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? 
arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }\n\nfunction _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\n\nvar ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE;\n\nfunction from(Readable, iterable, opts) {\n var iterator;\n\n if (iterable && typeof iterable.next === 'function') {\n iterator = iterable;\n } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);\n\n var readable = new Readable(_objectSpread({\n objectMode: true\n }, opts)); // Reading boolean to protect against _read\n // being called before last iteration completion.\n\n var reading = false;\n\n readable._read = function () {\n if (!reading) {\n reading = true;\n next();\n }\n };\n\n function next() {\n return _next2.apply(this, arguments);\n }\n\n function _next2() {\n _next2 = _asyncToGenerator(function* () {\n try {\n var _ref = yield iterator.next(),\n value = _ref.value,\n done = _ref.done;\n\n if (done) {\n readable.push(null);\n } else if (readable.push((yield value))) {\n next();\n } else {\n reading = false;\n }\n } catch (err) {\n readable.destroy(err);\n }\n });\n return _next2.apply(this, arguments);\n }\n\n return readable;\n}\n\nmodule.exports = from;","module.exports = require(\"https\");","'use strict';\n\nvar ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE;\n\nfunction highWaterMarkFrom(options, isDuplex, duplexKey) {\n return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null;\n}\n\nfunction getHighWaterMark(state, options, duplexKey, isDuplex) {\n var hwm = highWaterMarkFrom(options, isDuplex, duplexKey);\n\n if (hwm != null) {\n if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) {\n var name = isDuplex ? duplexKey : 'highWaterMark';\n throw new ERR_INVALID_OPT_VALUE(name, hwm);\n }\n\n return Math.floor(hwm);\n } // Default value\n\n\n return state.objectMode ? 16 : 16 * 1024;\n}\n\nmodule.exports = {\n getHighWaterMark: getHighWaterMark\n};","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n'use strict';\n\nmodule.exports = Readable;\n/**/\n\nvar Duplex;\n/**/\n\nReadable.ReadableState = ReadableState;\n/**/\n\nvar EE = require('events').EventEmitter;\n\nvar EElistenerCount = function EElistenerCount(emitter, type) {\n return emitter.listeners(type).length;\n};\n/**/\n\n/**/\n\n\nvar Stream = require('./internal/streams/stream');\n/**/\n\n\nvar Buffer = require('buffer').Buffer;\n\nvar OurUint8Array = global.Uint8Array || function () {};\n\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\n\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n/**/\n\n\nvar debugUtil = require('util');\n\nvar debug;\n\nif (debugUtil && debugUtil.debuglog) {\n debug = debugUtil.debuglog('stream');\n} else {\n debug = function debug() {};\n}\n/**/\n\n\nvar BufferList = require('./internal/streams/buffer_list');\n\nvar destroyImpl = require('./internal/streams/destroy');\n\nvar _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\n\nvar _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance.\n\n\nvar StringDecoder;\nvar createReadableStreamAsyncIterator;\nvar from;\n\nrequire('inherits')(Readable, Stream);\n\nvar errorOrDestroy = destroyImpl.errorOrDestroy;\nvar kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];\n\nfunction prependListener(emitter, event, fn) {\n // Sadly this is not cacheable as some libraries bundle their own\n // event emitter implementation with them.\n if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any\n // userland ones. NEVER DO THIS. This is here only because this code needs\n // to continue to work with older versions of Node.js that do not include\n // the prependListener() method. 
The goal is to eventually remove this hack.\n\n if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];\n}\n\nfunction ReadableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {}; // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream.\n // These options can be provided separately as readableXXX and writableXXX.\n\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to\n // make all the buffer merging and length checks go away\n\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer\n // Note: 0 is a valid value, means \"don't call _read preemptively ever\"\n\n this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the\n // linked list can remove elements from the beginning faster than\n // array.shift()\n\n this.buffer = new BufferList();\n this.length = 0;\n this.pipes = null;\n this.pipesCount = 0;\n this.flowing = null;\n this.ended = false;\n this.endEmitted = false;\n this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted\n // immediately, or on a later tick. We set this to true at first, because\n // any actions that shouldn't happen until \"later\" should generally also\n // not happen before the first read call.\n\n this.sync = true; // whenever we return null, then we set a flag to say\n // that we're awaiting a 'readable' event emission.\n\n this.needReadable = false;\n this.emittedReadable = false;\n this.readableListening = false;\n this.resumeScheduled = false;\n this.paused = true; // Should close be emitted on destroy. Defaults to true.\n\n this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish')\n\n this.autoDestroy = !!options.autoDestroy; // has it been destroyed\n\n this.destroyed = false; // Crypto is kind of old and crusty. 
Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n\n this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s\n\n this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled\n\n this.readingMore = false;\n this.decoder = null;\n this.encoding = null;\n\n if (options.encoding) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n this.decoder = new StringDecoder(options.encoding);\n this.encoding = options.encoding;\n }\n}\n\nfunction Readable(options) {\n Duplex = Duplex || require('./_stream_duplex');\n if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside\n // the ReadableState constructor, at least with V8 6.5\n\n var isDuplex = this instanceof Duplex;\n this._readableState = new ReadableState(options, this, isDuplex); // legacy\n\n this.readable = true;\n\n if (options) {\n if (typeof options.read === 'function') this._read = options.read;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n }\n\n Stream.call(this);\n}\n\nObject.defineProperty(Readable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n if (this._readableState === undefined) {\n return false;\n }\n\n return this._readableState.destroyed;\n },\n set: function set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._readableState) {\n return;\n } // backward compatibility, the user is explicitly\n // managing destroyed\n\n\n this._readableState.destroyed = value;\n }\n});\nReadable.prototype.destroy = destroyImpl.destroy;\nReadable.prototype._undestroy = destroyImpl.undestroy;\n\nReadable.prototype._destroy = function (err, cb) {\n cb(err);\n}; // Manually shove something into the read() buffer.\n// This returns true if the highWaterMark has not been hit yet,\n// similar to how Writable.write() returns true if you should\n// write() some more.\n\n\nReadable.prototype.push = function (chunk, encoding) {\n var state = this._readableState;\n var skipChunkCheck;\n\n if (!state.objectMode) {\n if (typeof chunk === 'string') {\n encoding = encoding || state.defaultEncoding;\n\n if (encoding !== state.encoding) {\n chunk = Buffer.from(chunk, encoding);\n encoding = '';\n }\n\n skipChunkCheck = true;\n }\n } else {\n skipChunkCheck = true;\n }\n\n return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);\n}; // Unshift should *always* be something directly out of read()\n\n\nReadable.prototype.unshift = function (chunk) {\n return readableAddChunk(this, chunk, null, true, false);\n};\n\nfunction readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {\n debug('readableAddChunk', chunk);\n var state = stream._readableState;\n\n if (chunk === null) {\n state.reading = false;\n onEofChunk(stream, state);\n } else {\n var er;\n if (!skipChunkCheck) er = chunkInvalid(state, chunk);\n\n if (er) {\n errorOrDestroy(stream, er);\n } else if (state.objectMode || chunk && chunk.length > 0) {\n if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n\n if (addToFront) {\n if (state.endEmitted) 
errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);\n } else if (state.ended) {\n errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());\n } else if (state.destroyed) {\n return false;\n } else {\n state.reading = false;\n\n if (state.decoder && !encoding) {\n chunk = state.decoder.write(chunk);\n if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);\n } else {\n addChunk(stream, state, chunk, false);\n }\n }\n } else if (!addToFront) {\n state.reading = false;\n maybeReadMore(stream, state);\n }\n } // We can push more data if we are below the highWaterMark.\n // Also, if we have no data yet, we can stand some more bytes.\n // This is to work around cases where hwm=0, such as the repl.\n\n\n return !state.ended && (state.length < state.highWaterMark || state.length === 0);\n}\n\nfunction addChunk(stream, state, chunk, addToFront) {\n if (state.flowing && state.length === 0 && !state.sync) {\n state.awaitDrain = 0;\n stream.emit('data', chunk);\n } else {\n // update the buffer info.\n state.length += state.objectMode ? 1 : chunk.length;\n if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);\n if (state.needReadable) emitReadable(stream);\n }\n\n maybeReadMore(stream, state);\n}\n\nfunction chunkInvalid(state, chunk) {\n var er;\n\n if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);\n }\n\n return er;\n}\n\nReadable.prototype.isPaused = function () {\n return this._readableState.flowing === false;\n}; // backwards compatibility.\n\n\nReadable.prototype.setEncoding = function (enc) {\n if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;\n var decoder = new StringDecoder(enc);\n this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8\n\n this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers:\n\n var p = this._readableState.buffer.head;\n var content = '';\n\n while (p !== null) {\n content += decoder.write(p.data);\n p = p.next;\n }\n\n this._readableState.buffer.clear();\n\n if (content !== '') this._readableState.buffer.push(content);\n this._readableState.length = content.length;\n return this;\n}; // Don't raise the hwm > 1GB\n\n\nvar MAX_HWM = 0x40000000;\n\nfunction computeNewHighWaterMark(n) {\n if (n >= MAX_HWM) {\n // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.\n n = MAX_HWM;\n } else {\n // Get the next highest power of 2 to prevent increasing hwm excessively in\n // tiny amounts\n n--;\n n |= n >>> 1;\n n |= n >>> 2;\n n |= n >>> 4;\n n |= n >>> 8;\n n |= n >>> 16;\n n++;\n }\n\n return n;\n} // This function is designed to be inlinable, so please take care when making\n// changes to the function body.\n\n\nfunction howMuchToRead(n, state) {\n if (n <= 0 || state.length === 0 && state.ended) return 0;\n if (state.objectMode) return 1;\n\n if (n !== n) {\n // Only flow one buffer at a time\n if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;\n } // If we're asking for more than the current hwm, then raise the hwm.\n\n\n if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);\n if (n <= state.length) return n; // Don't have enough\n\n if (!state.ended) {\n state.needReadable = true;\n return 0;\n 
}\n\n return state.length;\n} // you can override either this method, or the async _read(n) below.\n\n\nReadable.prototype.read = function (n) {\n debug('read', n);\n n = parseInt(n, 10);\n var state = this._readableState;\n var nOrig = n;\n if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we\n // already have a bunch of data in the buffer, then just trigger\n // the 'readable' event and move on.\n\n if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {\n debug('read: emitReadable', state.length, state.ended);\n if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);\n return null;\n }\n\n n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up.\n\n if (n === 0 && state.ended) {\n if (state.length === 0) endReadable(this);\n return null;\n } // All the actual chunk generation logic needs to be\n // *below* the call to _read. The reason is that in certain\n // synthetic stream cases, such as passthrough streams, _read\n // may be a completely synchronous operation which may change\n // the state of the read buffer, providing enough data when\n // before there was *not* enough.\n //\n // So, the steps are:\n // 1. Figure out what the state of things will be after we do\n // a read from the buffer.\n //\n // 2. If that resulting state will trigger a _read, then call _read.\n // Note that this may be asynchronous, or synchronous. Yes, it is\n // deeply ugly to write APIs this way, but that still doesn't mean\n // that the Readable class should behave improperly, as streams are\n // designed to be sync/async agnostic.\n // Take note if the _read call is sync or async (ie, if the read call\n // has returned yet), so that we know whether or not it's safe to emit\n // 'readable' etc.\n //\n // 3. 
Actually pull the requested chunks out of the buffer and return.\n // if we need a readable event, then we need to do some reading.\n\n\n var doRead = state.needReadable;\n debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some\n\n if (state.length === 0 || state.length - n < state.highWaterMark) {\n doRead = true;\n debug('length less than watermark', doRead);\n } // however, if we've ended, then there's no point, and if we're already\n // reading, then it's unnecessary.\n\n\n if (state.ended || state.reading) {\n doRead = false;\n debug('reading or ended', doRead);\n } else if (doRead) {\n debug('do read');\n state.reading = true;\n state.sync = true; // if the length is currently zero, then we *need* a readable event.\n\n if (state.length === 0) state.needReadable = true; // call internal read method\n\n this._read(state.highWaterMark);\n\n state.sync = false; // If _read pushed data synchronously, then `reading` will be false,\n // and we need to re-evaluate how much data we can return to the user.\n\n if (!state.reading) n = howMuchToRead(nOrig, state);\n }\n\n var ret;\n if (n > 0) ret = fromList(n, state);else ret = null;\n\n if (ret === null) {\n state.needReadable = state.length <= state.highWaterMark;\n n = 0;\n } else {\n state.length -= n;\n state.awaitDrain = 0;\n }\n\n if (state.length === 0) {\n // If we have nothing in the buffer, then we want to know\n // as soon as we *do* get something into the buffer.\n if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick.\n\n if (nOrig !== n && state.ended) endReadable(this);\n }\n\n if (ret !== null) this.emit('data', ret);\n return ret;\n};\n\nfunction onEofChunk(stream, state) {\n debug('onEofChunk');\n if (state.ended) return;\n\n if (state.decoder) {\n var chunk = state.decoder.end();\n\n if (chunk && chunk.length) {\n state.buffer.push(chunk);\n state.length += state.objectMode ? 1 : chunk.length;\n }\n }\n\n state.ended = true;\n\n if (state.sync) {\n // if we are sync, wait until next tick to emit the data.\n // Otherwise we risk emitting data in the flow()\n // the readable code triggers during a read() call\n emitReadable(stream);\n } else {\n // emit 'readable' now to make sure it gets picked up.\n state.needReadable = false;\n\n if (!state.emittedReadable) {\n state.emittedReadable = true;\n emitReadable_(stream);\n }\n }\n} // Don't emit readable right away in sync mode, because this can trigger\n// another read() call => stack overflow. This way, it might trigger\n// a nextTick recursion warning, but that's not so bad.\n\n\nfunction emitReadable(stream) {\n var state = stream._readableState;\n debug('emitReadable', state.needReadable, state.emittedReadable);\n state.needReadable = false;\n\n if (!state.emittedReadable) {\n debug('emitReadable', state.flowing);\n state.emittedReadable = true;\n process.nextTick(emitReadable_, stream);\n }\n}\n\nfunction emitReadable_(stream) {\n var state = stream._readableState;\n debug('emitReadable_', state.destroyed, state.length, state.ended);\n\n if (!state.destroyed && (state.length || state.ended)) {\n stream.emit('readable');\n state.emittedReadable = false;\n } // The stream needs another readable event if\n // 1. It is not flowing, as the flow mechanism will take\n // care of it.\n // 2. It is not ended.\n // 3. 
It is below the highWaterMark, so we can schedule\n // another readable later.\n\n\n state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;\n flow(stream);\n} // at this point, the user has presumably seen the 'readable' event,\n// and called read() to consume some data. that may have triggered\n// in turn another _read(n) call, in which case reading = true if\n// it's in progress.\n// However, if we're not ended, or reading, and the length < hwm,\n// then go ahead and try to read some more preemptively.\n\n\nfunction maybeReadMore(stream, state) {\n if (!state.readingMore) {\n state.readingMore = true;\n process.nextTick(maybeReadMore_, stream, state);\n }\n}\n\nfunction maybeReadMore_(stream, state) {\n // Attempt to read more data if we should.\n //\n // The conditions for reading more data are (one of):\n // - Not enough data buffered (state.length < state.highWaterMark). The loop\n // is responsible for filling the buffer with enough data if such data\n // is available. If highWaterMark is 0 and we are not in the flowing mode\n // we should _not_ attempt to buffer any extra data. We'll get more data\n // when the stream consumer calls read() instead.\n // - No data in the buffer, and the stream is in flowing mode. In this mode\n // the loop below is responsible for ensuring read() is called. Failing to\n // call read here would abort the flow and there's no other mechanism for\n // continuing the flow if the stream consumer has just subscribed to the\n // 'data' event.\n //\n // In addition to the above conditions to keep reading data, the following\n // conditions prevent the data from being read:\n // - The stream has ended (state.ended).\n // - There is already a pending 'read' operation (state.reading). This is a\n // case where the the stream has called the implementation defined _read()\n // method, but they are processing the call asynchronously and have _not_\n // called push() with new data. In this case we skip performing more\n // read()s. The execution ends in this method again after the _read() ends\n // up calling push() with more data.\n while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {\n var len = state.length;\n debug('maybeReadMore read 0');\n stream.read(0);\n if (len === state.length) // didn't get any data, stop spinning.\n break;\n }\n\n state.readingMore = false;\n} // abstract method. to be overridden in specific implementation classes.\n// call cb(er, data) where data is <= n in length.\n// for virtual (non-string, non-buffer) streams, \"length\" is somewhat\n// arbitrary, and perhaps not very meaningful.\n\n\nReadable.prototype._read = function (n) {\n errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));\n};\n\nReadable.prototype.pipe = function (dest, pipeOpts) {\n var src = this;\n var state = this._readableState;\n\n switch (state.pipesCount) {\n case 0:\n state.pipes = dest;\n break;\n\n case 1:\n state.pipes = [state.pipes, dest];\n break;\n\n default:\n state.pipes.push(dest);\n break;\n }\n\n state.pipesCount += 1;\n debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);\n var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;\n var endFn = doEnd ? 
onend : unpipe;\n if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);\n dest.on('unpipe', onunpipe);\n\n function onunpipe(readable, unpipeInfo) {\n debug('onunpipe');\n\n if (readable === src) {\n if (unpipeInfo && unpipeInfo.hasUnpiped === false) {\n unpipeInfo.hasUnpiped = true;\n cleanup();\n }\n }\n }\n\n function onend() {\n debug('onend');\n dest.end();\n } // when the dest drains, it reduces the awaitDrain counter\n // on the source. This would be more elegant with a .once()\n // handler in flow(), but adding and removing repeatedly is\n // too slow.\n\n\n var ondrain = pipeOnDrain(src);\n dest.on('drain', ondrain);\n var cleanedUp = false;\n\n function cleanup() {\n debug('cleanup'); // cleanup event handlers once the pipe is broken\n\n dest.removeListener('close', onclose);\n dest.removeListener('finish', onfinish);\n dest.removeListener('drain', ondrain);\n dest.removeListener('error', onerror);\n dest.removeListener('unpipe', onunpipe);\n src.removeListener('end', onend);\n src.removeListener('end', unpipe);\n src.removeListener('data', ondata);\n cleanedUp = true; // if the reader is waiting for a drain event from this\n // specific writer, then it would cause it to never start\n // flowing again.\n // So, if this is awaiting a drain, then we just call it now.\n // If we don't know, then assume that we are waiting for one.\n\n if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();\n }\n\n src.on('data', ondata);\n\n function ondata(chunk) {\n debug('ondata');\n var ret = dest.write(chunk);\n debug('dest.write', ret);\n\n if (ret === false) {\n // If the user unpiped during `dest.write()`, it is possible\n // to get stuck in a permanently paused state if that write\n // also returned false.\n // => Check whether `dest` is still a piping destination.\n if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {\n debug('false write response, pause', state.awaitDrain);\n state.awaitDrain++;\n }\n\n src.pause();\n }\n } // if the dest has an error, then stop piping into it.\n // however, don't suppress the throwing behavior for this.\n\n\n function onerror(er) {\n debug('onerror', er);\n unpipe();\n dest.removeListener('error', onerror);\n if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);\n } // Make sure our error handler is attached before userland ones.\n\n\n prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once.\n\n function onclose() {\n dest.removeListener('finish', onfinish);\n unpipe();\n }\n\n dest.once('close', onclose);\n\n function onfinish() {\n debug('onfinish');\n dest.removeListener('close', onclose);\n unpipe();\n }\n\n dest.once('finish', onfinish);\n\n function unpipe() {\n debug('unpipe');\n src.unpipe(dest);\n } // tell the dest that it's being piped to\n\n\n dest.emit('pipe', src); // start the flow if it hasn't been started already.\n\n if (!state.flowing) {\n debug('pipe resume');\n src.resume();\n }\n\n return dest;\n};\n\nfunction pipeOnDrain(src) {\n return function pipeOnDrainFunctionResult() {\n var state = src._readableState;\n debug('pipeOnDrain', state.awaitDrain);\n if (state.awaitDrain) state.awaitDrain--;\n\n if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {\n state.flowing = true;\n flow(src);\n }\n };\n}\n\nReadable.prototype.unpipe = function (dest) {\n var state = this._readableState;\n var unpipeInfo = {\n hasUnpiped: false\n }; // if 
we're not piping anywhere, then do nothing.\n\n if (state.pipesCount === 0) return this; // just one destination. most common case.\n\n if (state.pipesCount === 1) {\n // passed in one, but it's not the right one.\n if (dest && dest !== state.pipes) return this;\n if (!dest) dest = state.pipes; // got a match.\n\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n if (dest) dest.emit('unpipe', this, unpipeInfo);\n return this;\n } // slow case. multiple pipe destinations.\n\n\n if (!dest) {\n // remove all.\n var dests = state.pipes;\n var len = state.pipesCount;\n state.pipes = null;\n state.pipesCount = 0;\n state.flowing = false;\n\n for (var i = 0; i < len; i++) {\n dests[i].emit('unpipe', this, {\n hasUnpiped: false\n });\n }\n\n return this;\n } // try to find the right one.\n\n\n var index = indexOf(state.pipes, dest);\n if (index === -1) return this;\n state.pipes.splice(index, 1);\n state.pipesCount -= 1;\n if (state.pipesCount === 1) state.pipes = state.pipes[0];\n dest.emit('unpipe', this, unpipeInfo);\n return this;\n}; // set up data events if they are asked for\n// Ensure readable listeners eventually get something\n\n\nReadable.prototype.on = function (ev, fn) {\n var res = Stream.prototype.on.call(this, ev, fn);\n var state = this._readableState;\n\n if (ev === 'data') {\n // update readableListening so that resume() may be a no-op\n // a few lines down. This is needed to support once('readable').\n state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused\n\n if (state.flowing !== false) this.resume();\n } else if (ev === 'readable') {\n if (!state.endEmitted && !state.readableListening) {\n state.readableListening = state.needReadable = true;\n state.flowing = false;\n state.emittedReadable = false;\n debug('on readable', state.length, state.reading);\n\n if (state.length) {\n emitReadable(this);\n } else if (!state.reading) {\n process.nextTick(nReadingNextTick, this);\n }\n }\n }\n\n return res;\n};\n\nReadable.prototype.addListener = Readable.prototype.on;\n\nReadable.prototype.removeListener = function (ev, fn) {\n var res = Stream.prototype.removeListener.call(this, ev, fn);\n\n if (ev === 'readable') {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n\n return res;\n};\n\nReadable.prototype.removeAllListeners = function (ev) {\n var res = Stream.prototype.removeAllListeners.apply(this, arguments);\n\n if (ev === 'readable' || ev === undefined) {\n // We need to check if there is someone still listening to\n // readable and reset the state. However this needs to happen\n // after readable has been emitted but before I/O (nextTick) to\n // support once('readable', fn) cycles. 
This means that calling\n // resume within the same tick will have no\n // effect.\n process.nextTick(updateReadableListening, this);\n }\n\n return res;\n};\n\nfunction updateReadableListening(self) {\n var state = self._readableState;\n state.readableListening = self.listenerCount('readable') > 0;\n\n if (state.resumeScheduled && !state.paused) {\n // flowing needs to be set to true now, otherwise\n // the upcoming resume will not flow.\n state.flowing = true; // crude way to check if we should resume\n } else if (self.listenerCount('data') > 0) {\n self.resume();\n }\n}\n\nfunction nReadingNextTick(self) {\n debug('readable nexttick read 0');\n self.read(0);\n} // pause() and resume() are remnants of the legacy readable stream API\n// If the user uses them, then switch into old mode.\n\n\nReadable.prototype.resume = function () {\n var state = this._readableState;\n\n if (!state.flowing) {\n debug('resume'); // we flow only if there is no one listening\n // for readable, but we still have to call\n // resume()\n\n state.flowing = !state.readableListening;\n resume(this, state);\n }\n\n state.paused = false;\n return this;\n};\n\nfunction resume(stream, state) {\n if (!state.resumeScheduled) {\n state.resumeScheduled = true;\n process.nextTick(resume_, stream, state);\n }\n}\n\nfunction resume_(stream, state) {\n debug('resume', state.reading);\n\n if (!state.reading) {\n stream.read(0);\n }\n\n state.resumeScheduled = false;\n stream.emit('resume');\n flow(stream);\n if (state.flowing && !state.reading) stream.read(0);\n}\n\nReadable.prototype.pause = function () {\n debug('call pause flowing=%j', this._readableState.flowing);\n\n if (this._readableState.flowing !== false) {\n debug('pause');\n this._readableState.flowing = false;\n this.emit('pause');\n }\n\n this._readableState.paused = true;\n return this;\n};\n\nfunction flow(stream) {\n var state = stream._readableState;\n debug('flow', state.flowing);\n\n while (state.flowing && stream.read() !== null) {\n ;\n }\n} // wrap an old-style stream as the async data source.\n// This is *not* part of the readable stream interface.\n// It is an ugly unfortunate mess of history.\n\n\nReadable.prototype.wrap = function (stream) {\n var _this = this;\n\n var state = this._readableState;\n var paused = false;\n stream.on('end', function () {\n debug('wrapped end');\n\n if (state.decoder && !state.ended) {\n var chunk = state.decoder.end();\n if (chunk && chunk.length) _this.push(chunk);\n }\n\n _this.push(null);\n });\n stream.on('data', function (chunk) {\n debug('wrapped data');\n if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode\n\n if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;\n\n var ret = _this.push(chunk);\n\n if (!ret) {\n paused = true;\n stream.pause();\n }\n }); // proxy all the other methods.\n // important when wrapping filters and duplexes.\n\n for (var i in stream) {\n if (this[i] === undefined && typeof stream[i] === 'function') {\n this[i] = function methodWrap(method) {\n return function methodWrapReturnFunction() {\n return stream[method].apply(stream, arguments);\n };\n }(i);\n }\n } // proxy certain important events.\n\n\n for (var n = 0; n < kProxyEvents.length; n++) {\n stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));\n } // when we try to consume some more bytes, simply unpause the\n // underlying stream.\n\n\n this._read = function (n) {\n debug('wrapped 
_read', n);\n\n if (paused) {\n paused = false;\n stream.resume();\n }\n };\n\n return this;\n};\n\nif (typeof Symbol === 'function') {\n Readable.prototype[Symbol.asyncIterator] = function () {\n if (createReadableStreamAsyncIterator === undefined) {\n createReadableStreamAsyncIterator = require('./internal/streams/async_iterator');\n }\n\n return createReadableStreamAsyncIterator(this);\n };\n}\n\nObject.defineProperty(Readable.prototype, 'readableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.highWaterMark;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState && this._readableState.buffer;\n }\n});\nObject.defineProperty(Readable.prototype, 'readableFlowing', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.flowing;\n },\n set: function set(state) {\n if (this._readableState) {\n this._readableState.flowing = state;\n }\n }\n}); // exposed for testing purposes only.\n\nReadable._fromList = fromList;\nObject.defineProperty(Readable.prototype, 'readableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._readableState.length;\n }\n}); // Pluck off n bytes from an array of buffers.\n// Length is the combined lengths of all the buffers in the list.\n// This function is designed to be inlinable, so please take care when making\n// changes to the function body.\n\nfunction fromList(n, state) {\n // nothing buffered\n if (state.length === 0) return null;\n var ret;\n if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {\n // read it all, truncate the list\n if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);\n state.buffer.clear();\n } else {\n // read part of list\n ret = state.buffer.consume(n, state.decoder);\n }\n return ret;\n}\n\nfunction endReadable(stream) {\n var state = stream._readableState;\n debug('endReadable', state.endEmitted);\n\n if (!state.endEmitted) {\n state.ended = true;\n process.nextTick(endReadableNT, state, stream);\n }\n}\n\nfunction endReadableNT(state, stream) {\n debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift.\n\n if (!state.endEmitted && state.length === 0) {\n state.endEmitted = true;\n stream.readable = false;\n stream.emit('end');\n\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the writable side is ready for autoDestroy as well\n var wState = stream._writableState;\n\n if (!wState || wState.autoDestroy && wState.finished) {\n stream.destroy();\n }\n }\n }\n}\n\nif (typeof Symbol === 'function') {\n Readable.from = function (iterable, opts) {\n if (from === undefined) {\n from = require('./internal/streams/from');\n }\n\n return from(Readable, iterable, opts);\n };\n}\n\nfunction indexOf(xs, x) {\n for (var i = 0, l = 
xs.length; i < l; i++) {\n if (xs[i] === x) return i;\n }\n\n return -1;\n}","'use strict'; // undocumented cb() API, needed for core, not for public API\n\nfunction destroy(err, cb) {\n var _this = this;\n\n var readableDestroyed = this._readableState && this._readableState.destroyed;\n var writableDestroyed = this._writableState && this._writableState.destroyed;\n\n if (readableDestroyed || writableDestroyed) {\n if (cb) {\n cb(err);\n } else if (err) {\n if (!this._writableState) {\n process.nextTick(emitErrorNT, this, err);\n } else if (!this._writableState.errorEmitted) {\n this._writableState.errorEmitted = true;\n process.nextTick(emitErrorNT, this, err);\n }\n }\n\n return this;\n } // we set destroyed to true before firing error callbacks in order\n // to make it re-entrance safe in case destroy() is called within callbacks\n\n\n if (this._readableState) {\n this._readableState.destroyed = true;\n } // if this is a duplex stream mark the writable part as destroyed as well\n\n\n if (this._writableState) {\n this._writableState.destroyed = true;\n }\n\n this._destroy(err || null, function (err) {\n if (!cb && err) {\n if (!_this._writableState) {\n process.nextTick(emitErrorAndCloseNT, _this, err);\n } else if (!_this._writableState.errorEmitted) {\n _this._writableState.errorEmitted = true;\n process.nextTick(emitErrorAndCloseNT, _this, err);\n } else {\n process.nextTick(emitCloseNT, _this);\n }\n } else if (cb) {\n process.nextTick(emitCloseNT, _this);\n cb(err);\n } else {\n process.nextTick(emitCloseNT, _this);\n }\n });\n\n return this;\n}\n\nfunction emitErrorAndCloseNT(self, err) {\n emitErrorNT(self, err);\n emitCloseNT(self);\n}\n\nfunction emitCloseNT(self) {\n if (self._writableState && !self._writableState.emitClose) return;\n if (self._readableState && !self._readableState.emitClose) return;\n self.emit('close');\n}\n\nfunction undestroy() {\n if (this._readableState) {\n this._readableState.destroyed = false;\n this._readableState.reading = false;\n this._readableState.ended = false;\n this._readableState.endEmitted = false;\n }\n\n if (this._writableState) {\n this._writableState.destroyed = false;\n this._writableState.ended = false;\n this._writableState.ending = false;\n this._writableState.finalCalled = false;\n this._writableState.prefinished = false;\n this._writableState.finished = false;\n this._writableState.errorEmitted = false;\n }\n}\n\nfunction emitErrorNT(self, err) {\n self.emit('error', err);\n}\n\nfunction errorOrDestroy(stream, err) {\n // We have tests that rely on errors being emitted\n // in the same tick, so changing this is semver major.\n // For now when you opt-in to autoDestroy we allow\n // the error to be emitted nextTick. 
In a future\n // semver major update we should change the default to this.\n var rState = stream._readableState;\n var wState = stream._writableState;\n if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);\n}\n\nmodule.exports = {\n destroy: destroy,\n undestroy: undestroy,\n errorOrDestroy: errorOrDestroy\n};","// Ported from https://github.com/mafintosh/pump with\n// permission from the author, Mathias Buus (@mafintosh).\n'use strict';\n\nvar eos;\n\nfunction once(callback) {\n var called = false;\n return function () {\n if (called) return;\n called = true;\n callback.apply(void 0, arguments);\n };\n}\n\nvar _require$codes = require('../../../errors').codes,\n ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;\n\nfunction noop(err) {\n // Rethrow the error if it exists to avoid swallowing it\n if (err) throw err;\n}\n\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\n\nfunction destroyer(stream, reading, writing, callback) {\n callback = once(callback);\n var closed = false;\n stream.on('close', function () {\n closed = true;\n });\n if (eos === undefined) eos = require('./end-of-stream');\n eos(stream, {\n readable: reading,\n writable: writing\n }, function (err) {\n if (err) return callback(err);\n closed = true;\n callback();\n });\n var destroyed = false;\n return function (err) {\n if (closed) return;\n if (destroyed) return;\n destroyed = true; // request.destroy just do .end - .abort is what we want\n\n if (isRequest(stream)) return stream.abort();\n if (typeof stream.destroy === 'function') return stream.destroy();\n callback(err || new ERR_STREAM_DESTROYED('pipe'));\n };\n}\n\nfunction call(fn) {\n fn();\n}\n\nfunction pipe(from, to) {\n return from.pipe(to);\n}\n\nfunction popCallback(streams) {\n if (!streams.length) return noop;\n if (typeof streams[streams.length - 1] !== 'function') return noop;\n return streams.pop();\n}\n\nfunction pipeline() {\n for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {\n streams[_key] = arguments[_key];\n }\n\n var callback = popCallback(streams);\n if (Array.isArray(streams[0])) streams = streams[0];\n\n if (streams.length < 2) {\n throw new ERR_MISSING_ARGS('streams');\n }\n\n var error;\n var destroys = streams.map(function (stream, i) {\n var reading = i < streams.length - 1;\n var writing = i > 0;\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err;\n if (err) destroys.forEach(call);\n if (reading) return;\n destroys.forEach(call);\n callback(error);\n });\n });\n return streams.reduce(pipe);\n}\n\nmodule.exports = pipeline;","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n// A bit simpler than readable streams.\n// Implement an async ._write(chunk, encoding, cb), and it'll handle all\n// the drain event emission and buffering.\n'use strict';\n\nmodule.exports = Writable;\n/* */\n\nfunction WriteReq(chunk, encoding, cb) {\n this.chunk = chunk;\n this.encoding = encoding;\n this.callback = cb;\n this.next = null;\n} // It seems a linked list but it is not\n// there will be only 2 of these for each stream\n\n\nfunction CorkedRequest(state) {\n var _this = this;\n\n this.next = null;\n this.entry = null;\n\n this.finish = function () {\n onCorkedFinish(_this, state);\n };\n}\n/* */\n\n/**/\n\n\nvar Duplex;\n/**/\n\nWritable.WritableState = WritableState;\n/**/\n\nvar internalUtil = {\n deprecate: require('util-deprecate')\n};\n/**/\n\n/**/\n\nvar Stream = require('./internal/streams/stream');\n/**/\n\n\nvar Buffer = require('buffer').Buffer;\n\nvar OurUint8Array = global.Uint8Array || function () {};\n\nfunction _uint8ArrayToBuffer(chunk) {\n return Buffer.from(chunk);\n}\n\nfunction _isUint8Array(obj) {\n return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;\n}\n\nvar destroyImpl = require('./internal/streams/destroy');\n\nvar _require = require('./internal/streams/state'),\n getHighWaterMark = _require.getHighWaterMark;\n\nvar _require$codes = require('../errors').codes,\n ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,\n ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,\n ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,\n ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,\n ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;\n\nvar errorOrDestroy = destroyImpl.errorOrDestroy;\n\nrequire('inherits')(Writable, Stream);\n\nfunction nop() {}\n\nfunction WritableState(options, stream, isDuplex) {\n Duplex = Duplex || require('./_stream_duplex');\n options = options || {}; // Duplex streams are both readable and writable, but share\n // the same options object.\n // However, some cases require setting options to different\n // values for the readable and the writable sides of the duplex stream,\n // e.g. options.readableObjectMode vs. 
options.writableObjectMode, etc.\n\n if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream\n // contains buffers or objects.\n\n this.objectMode = !!options.objectMode;\n if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false\n // Note: 0 is a valid value, means that we always return false if\n // the entire buffer is not flushed immediately on write()\n\n this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called\n\n this.finalCalled = false; // drain event flag.\n\n this.needDrain = false; // at the start of calling end()\n\n this.ending = false; // when end() has been called, and returned\n\n this.ended = false; // when 'finish' is emitted\n\n this.finished = false; // has it been destroyed\n\n this.destroyed = false; // should we decode strings into buffers before passing to _write?\n // this is here so that some node-core streams can optimize string\n // handling at a lower level.\n\n var noDecode = options.decodeStrings === false;\n this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string\n // encoding is 'binary' so we have to make this configurable.\n // Everything else in the universe uses 'utf8', though.\n\n this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement\n // of how much we're waiting to get pushed to some underlying\n // socket or file.\n\n this.length = 0; // a flag to see when we're in the middle of a write.\n\n this.writing = false; // when true all writes will be buffered until .uncork() call\n\n this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,\n // or on a later tick. We set this to true at first, because any\n // actions that shouldn't happen until \"later\" should generally also\n // not happen before the first write call.\n\n this.sync = true; // a flag to know if we're processing previously buffered items, which\n // may call the _write() callback in the same tick, so that we don't\n // end up in an overlapped onwrite situation.\n\n this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)\n\n this.onwrite = function (er) {\n onwrite(stream, er);\n }; // the callback that the user supplies to write(chunk,encoding,cb)\n\n\n this.writecb = null; // the amount that is being written when _write is called.\n\n this.writelen = 0;\n this.bufferedRequest = null;\n this.lastBufferedRequest = null; // number of pending user-supplied write callbacks\n // this must be 0 before 'finish' can be emitted\n\n this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs\n // This is relevant for synchronous Transform streams\n\n this.prefinished = false; // True if the error was already emitted and should not be thrown again\n\n this.errorEmitted = false; // Should close be emitted on destroy. 
Defaults to true.\n\n this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')\n\n this.autoDestroy = !!options.autoDestroy; // count buffered requests\n\n this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always\n // one allocated and free to use, and we maintain at most two\n\n this.corkedRequestsFree = new CorkedRequest(this);\n}\n\nWritableState.prototype.getBuffer = function getBuffer() {\n var current = this.bufferedRequest;\n var out = [];\n\n while (current) {\n out.push(current);\n current = current.next;\n }\n\n return out;\n};\n\n(function () {\n try {\n Object.defineProperty(WritableState.prototype, 'buffer', {\n get: internalUtil.deprecate(function writableStateBufferGetter() {\n return this.getBuffer();\n }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')\n });\n } catch (_) {}\n})(); // Test _writableState for inheritance to account for Duplex streams,\n// whose prototype chain only points to Readable.\n\n\nvar realHasInstance;\n\nif (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {\n realHasInstance = Function.prototype[Symbol.hasInstance];\n Object.defineProperty(Writable, Symbol.hasInstance, {\n value: function value(object) {\n if (realHasInstance.call(this, object)) return true;\n if (this !== Writable) return false;\n return object && object._writableState instanceof WritableState;\n }\n });\n} else {\n realHasInstance = function realHasInstance(object) {\n return object instanceof this;\n };\n}\n\nfunction Writable(options) {\n Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.\n // `realHasInstance` is necessary because using plain `instanceof`\n // would return false, as no `_writableState` property is attached.\n // Trying to use the custom `instanceof` for Writable here will also break the\n // Node.js LazyTransform implementation, which has a non-trivial getter for\n // `_writableState` that would lead to infinite recursion.\n // Checking for a Stream.Duplex instance is faster here instead of inside\n // the WritableState constructor, at least with V8 6.5\n\n var isDuplex = this instanceof Duplex;\n if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);\n this._writableState = new WritableState(options, this, isDuplex); // legacy.\n\n this.writable = true;\n\n if (options) {\n if (typeof options.write === 'function') this._write = options.write;\n if (typeof options.writev === 'function') this._writev = options.writev;\n if (typeof options.destroy === 'function') this._destroy = options.destroy;\n if (typeof options.final === 'function') this._final = options.final;\n }\n\n Stream.call(this);\n} // Otherwise people can pipe Writable streams, which is just wrong.\n\n\nWritable.prototype.pipe = function () {\n errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());\n};\n\nfunction writeAfterEnd(stream, cb) {\n var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb\n\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n} // Checks that a user-supplied chunk is valid, especially for the particular\n// mode the stream is in. 
Currently this means that `null` is never accepted\n// and undefined/non-string values are only allowed in object mode.\n\n\nfunction validChunk(stream, state, chunk, cb) {\n var er;\n\n if (chunk === null) {\n er = new ERR_STREAM_NULL_VALUES();\n } else if (typeof chunk !== 'string' && !state.objectMode) {\n er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);\n }\n\n if (er) {\n errorOrDestroy(stream, er);\n process.nextTick(cb, er);\n return false;\n }\n\n return true;\n}\n\nWritable.prototype.write = function (chunk, encoding, cb) {\n var state = this._writableState;\n var ret = false;\n\n var isBuf = !state.objectMode && _isUint8Array(chunk);\n\n if (isBuf && !Buffer.isBuffer(chunk)) {\n chunk = _uint8ArrayToBuffer(chunk);\n }\n\n if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n\n if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;\n if (typeof cb !== 'function') cb = nop;\n if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {\n state.pendingcb++;\n ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);\n }\n return ret;\n};\n\nWritable.prototype.cork = function () {\n this._writableState.corked++;\n};\n\nWritable.prototype.uncork = function () {\n var state = this._writableState;\n\n if (state.corked) {\n state.corked--;\n if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);\n }\n};\n\nWritable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {\n // node::ParseEncoding() requires lower case.\n if (typeof encoding === 'string') encoding = encoding.toLowerCase();\n if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);\n this._writableState.defaultEncoding = encoding;\n return this;\n};\n\nObject.defineProperty(Writable.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\n\nfunction decodeChunk(state, chunk, encoding) {\n if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {\n chunk = Buffer.from(chunk, encoding);\n }\n\n return chunk;\n}\n\nObject.defineProperty(Writable.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.highWaterMark;\n }\n}); // if we're already writing something, then just put this\n// in the queue, and wait our turn. Otherwise, call _write\n// If we return false, then we need a drain event, so set that flag.\n\nfunction writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {\n if (!isBuf) {\n var newChunk = decodeChunk(state, chunk, encoding);\n\n if (chunk !== newChunk) {\n isBuf = true;\n encoding = 'buffer';\n chunk = newChunk;\n }\n }\n\n var len = state.objectMode ? 
1 : chunk.length;\n state.length += len;\n var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.\n\n if (!ret) state.needDrain = true;\n\n if (state.writing || state.corked) {\n var last = state.lastBufferedRequest;\n state.lastBufferedRequest = {\n chunk: chunk,\n encoding: encoding,\n isBuf: isBuf,\n callback: cb,\n next: null\n };\n\n if (last) {\n last.next = state.lastBufferedRequest;\n } else {\n state.bufferedRequest = state.lastBufferedRequest;\n }\n\n state.bufferedRequestCount += 1;\n } else {\n doWrite(stream, state, false, len, chunk, encoding, cb);\n }\n\n return ret;\n}\n\nfunction doWrite(stream, state, writev, len, chunk, encoding, cb) {\n state.writelen = len;\n state.writecb = cb;\n state.writing = true;\n state.sync = true;\n if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);\n state.sync = false;\n}\n\nfunction onwriteError(stream, state, sync, er, cb) {\n --state.pendingcb;\n\n if (sync) {\n // defer the callback if we are being called synchronously\n // to avoid piling up things on the stack\n process.nextTick(cb, er); // this can emit finish, and it will always happen\n // after error\n\n process.nextTick(finishMaybe, stream, state);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er);\n } else {\n // the caller expect this to happen before if\n // it is async\n cb(er);\n stream._writableState.errorEmitted = true;\n errorOrDestroy(stream, er); // this can emit finish, but finish must\n // always follow error\n\n finishMaybe(stream, state);\n }\n}\n\nfunction onwriteStateUpdate(state) {\n state.writing = false;\n state.writecb = null;\n state.length -= state.writelen;\n state.writelen = 0;\n}\n\nfunction onwrite(stream, er) {\n var state = stream._writableState;\n var sync = state.sync;\n var cb = state.writecb;\n if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();\n onwriteStateUpdate(state);\n if (er) onwriteError(stream, state, sync, er, cb);else {\n // Check if we're actually ready to finish, but don't emit yet\n var finished = needFinish(state) || stream.destroyed;\n\n if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {\n clearBuffer(stream, state);\n }\n\n if (sync) {\n process.nextTick(afterWrite, stream, state, finished, cb);\n } else {\n afterWrite(stream, state, finished, cb);\n }\n }\n}\n\nfunction afterWrite(stream, state, finished, cb) {\n if (!finished) onwriteDrain(stream, state);\n state.pendingcb--;\n cb();\n finishMaybe(stream, state);\n} // Must force callback to be called on nextTick, so that we don't\n// emit 'drain' before the write() consumer gets the 'false' return\n// value, and has a chance to attach a 'drain' listener.\n\n\nfunction onwriteDrain(stream, state) {\n if (state.length === 0 && state.needDrain) {\n state.needDrain = false;\n stream.emit('drain');\n }\n} // if there's something in the buffer waiting, then process it\n\n\nfunction clearBuffer(stream, state) {\n state.bufferProcessing = true;\n var entry = state.bufferedRequest;\n\n if (stream._writev && entry && entry.next) {\n // Fast case, write everything using _writev()\n var l = state.bufferedRequestCount;\n var buffer = new Array(l);\n var holder = state.corkedRequestsFree;\n holder.entry = entry;\n var count = 0;\n var allBuffers = true;\n\n while (entry) {\n buffer[count] = entry;\n if (!entry.isBuf) allBuffers = false;\n 
entry = entry.next;\n count += 1;\n }\n\n buffer.allBuffers = allBuffers;\n doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time\n // as the hot path ends with doWrite\n\n state.pendingcb++;\n state.lastBufferedRequest = null;\n\n if (holder.next) {\n state.corkedRequestsFree = holder.next;\n holder.next = null;\n } else {\n state.corkedRequestsFree = new CorkedRequest(state);\n }\n\n state.bufferedRequestCount = 0;\n } else {\n // Slow case, write chunks one-by-one\n while (entry) {\n var chunk = entry.chunk;\n var encoding = entry.encoding;\n var cb = entry.callback;\n var len = state.objectMode ? 1 : chunk.length;\n doWrite(stream, state, false, len, chunk, encoding, cb);\n entry = entry.next;\n state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then\n // it means that we need to wait until it does.\n // also, that means that the chunk and cb are currently\n // being processed, so move the buffer counter past them.\n\n if (state.writing) {\n break;\n }\n }\n\n if (entry === null) state.lastBufferedRequest = null;\n }\n\n state.bufferedRequest = entry;\n state.bufferProcessing = false;\n}\n\nWritable.prototype._write = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));\n};\n\nWritable.prototype._writev = null;\n\nWritable.prototype.end = function (chunk, encoding, cb) {\n var state = this._writableState;\n\n if (typeof chunk === 'function') {\n cb = chunk;\n chunk = null;\n encoding = null;\n } else if (typeof encoding === 'function') {\n cb = encoding;\n encoding = null;\n }\n\n if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks\n\n if (state.corked) {\n state.corked = 1;\n this.uncork();\n } // ignore unnecessary end() calls.\n\n\n if (!state.ending) endWritable(this, state, cb);\n return this;\n};\n\nObject.defineProperty(Writable.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.length;\n }\n});\n\nfunction needFinish(state) {\n return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;\n}\n\nfunction callFinal(stream, state) {\n stream._final(function (err) {\n state.pendingcb--;\n\n if (err) {\n errorOrDestroy(stream, err);\n }\n\n state.prefinished = true;\n stream.emit('prefinish');\n finishMaybe(stream, state);\n });\n}\n\nfunction prefinish(stream, state) {\n if (!state.prefinished && !state.finalCalled) {\n if (typeof stream._final === 'function' && !state.destroyed) {\n state.pendingcb++;\n state.finalCalled = true;\n process.nextTick(callFinal, stream, state);\n } else {\n state.prefinished = true;\n stream.emit('prefinish');\n }\n }\n}\n\nfunction finishMaybe(stream, state) {\n var need = needFinish(state);\n\n if (need) {\n prefinish(stream, state);\n\n if (state.pendingcb === 0) {\n state.finished = true;\n stream.emit('finish');\n\n if (state.autoDestroy) {\n // In case of duplex streams we need a way to detect\n // if the readable side is ready for autoDestroy as well\n var rState = stream._readableState;\n\n if (!rState || rState.autoDestroy && rState.endEmitted) {\n stream.destroy();\n }\n }\n }\n }\n\n return need;\n}\n\nfunction endWritable(stream, state, cb) {\n state.ending = true;\n finishMaybe(stream, state);\n\n if (cb) {\n if 
(state.finished) process.nextTick(cb);else stream.once('finish', cb);\n }\n\n state.ended = true;\n stream.writable = false;\n}\n\nfunction onCorkedFinish(corkReq, state, err) {\n var entry = corkReq.entry;\n corkReq.entry = null;\n\n while (entry) {\n var cb = entry.callback;\n state.pendingcb--;\n cb(err);\n entry = entry.next;\n } // reuse the free corkReq.\n\n\n state.corkedRequestsFree.next = corkReq;\n}\n\nObject.defineProperty(Writable.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n if (this._writableState === undefined) {\n return false;\n }\n\n return this._writableState.destroyed;\n },\n set: function set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (!this._writableState) {\n return;\n } // backward compatibility, the user is explicitly\n // managing destroyed\n\n\n this._writableState.destroyed = value;\n }\n});\nWritable.prototype.destroy = destroyImpl.destroy;\nWritable.prototype._undestroy = destroyImpl.undestroy;\n\nWritable.prototype._destroy = function (err, cb) {\n cb(err);\n};","const { handleResponse } = require('fetch-errors')\nconst { createReadStream } = require('fs')\nconst afw = require('async-folder-walker')\nconst FormData = require('form-data')\nconst assert = require('nanoassert')\nconst fetch = require('node-fetch')\nconst { URL } = require('url')\nconst qs = require('qs')\nconst os = require('os')\n\nconst { neocitiesLocalDiff } = require('./lib/folder-diff')\nconst pkg = require('./package.json')\nconst SimpleTimer = require('./lib/timer')\nconst { getStreamLength, meterStream } = require('./lib/stream-meter')\nconst statsHandler = require('./lib/stats-handler')\n\nconst defaultURL = 'https://neocities.org'\n\n// Progress API constants\nconst START = 'start'\nconst PROGRESS = 'progress' // progress updates\nconst STOP = 'stop'\nconst SKIP = 'skip'\n// Progress stages\nconst INSPECTING = 'inspecting'\nconst DIFFING = 'diffing'\nconst APPLYING = 'applying'\n\n/**\n * NeocitiesAPIClient class representing a neocities api client.\n */\nclass NeocitiesAPIClient {\n /**\n * getKey returns an apiKey from a sitename and password.\n * @param {String} sitename username/sitename to log into.\n * @param {String} password password to log in with.\n * @param {Object} [opts] Options object.\n * @param {Object} [opts.url=https://neocities.org] Base URL to request to.\n * @return {Promise} An api key for the sitename..\n */\n static getKey (sitename, password, opts) {\n assert(sitename, 'must pass sitename as first arg')\n assert(typeof sitename === 'string', 'user arg must be a string')\n assert(password, 'must pass a password as the second arg')\n assert(typeof password, 'password arg must be a string')\n\n opts = Object.assign({\n url: defaultURL\n }, opts)\n\n const baseURL = opts.url\n delete opts.url\n\n const url = new URL('/api/key', baseURL)\n url.username = sitename\n url.password = password\n return fetch(url, opts)\n }\n\n static statsHandler (...args) { return statsHandler(...args) }\n\n /**\n * Create an async-neocities api client.\n * @param {string} apiKey An apiKey to make requests with.\n * @param {Object} [opts] Options object.\n * @param {Object} [opts.url=https://neocities.org] Base URL to make requests to.\n * @return {Object} An api client instance.\n */\n constructor (apiKey, opts) {\n assert(apiKey, 'must pass apiKey as first argument')\n assert(typeof 
apiKey === 'string', 'apiKey must be a string')\n opts = Object.assign({\n url: defaultURL\n })\n\n this.url = opts.url\n this.apiKey = apiKey\n }\n\n get defaultHeaders () {\n return {\n Authorization: `Bearer ${this.apiKey}`,\n Accept: 'application/json',\n 'User-Agent': `async-neocities/${pkg.version} (${os.type()})`\n }\n }\n\n /**\n * Generic GET request to neocities.\n * @param {String} endpoint An endpoint path to GET request.\n * @param {Object} [quieries] An object that gets added to the request in the form of a query string.\n * @param {Object} [opts] Options object.\n * @param {String} [opts.method=GET] The http method to use.\n * @param {Object} [opts.headers] Headers to include in the request.\n * @return {Object} The parsed JSON from the request response.\n */\n get (endpoint, quieries, opts) {\n assert(endpoint, 'must pass endpoint as first argument')\n opts = Object.assign({\n method: 'GET'\n }, opts)\n opts.headers = Object.assign({}, this.defaultHeaders, opts.headers)\n\n let path = `/api/${endpoint}`\n if (quieries) path += `?${qs.stringify(quieries)}`\n\n const url = new URL(path, this.url)\n return fetch(url, opts)\n }\n\n /**\n * Low level POST request to neocities with FormData.\n * @param {String} endpoint The endpoint to make the request to.\n * @param {Array.<{name: String, value: String}>} formEntries Array of form entries.\n * @param {Object} [opts] Options object.\n * @param {String} [opts.method=POST] HTTP Method.\n * @param {Object} [opts.headers] Additional headers to send.\n * @return {Object} The parsed JSON response object.\n */\n async post (endpoint, formEntries, opts) {\n assert(endpoint, 'must pass endpoint as first argument')\n assert(formEntries, 'must pass formEntries as second argument')\n\n function createForm () {\n const form = new FormData()\n for (const { name, value } of formEntries) {\n form.append(name, value)\n }\n\n return form\n }\n\n opts = Object.assign({\n method: 'POST',\n statsCb: () => {}\n }, opts)\n const statsCb = opts.statsCb\n delete opts.statsCb\n\n const stats = {\n totalBytes: await getStreamLength(createForm()),\n bytesWritten: 0\n }\n statsCb(stats)\n\n const form = createForm()\n\n opts.body = meterStream(form, bytesRead => {\n stats.bytesWritten = bytesRead\n statsCb(stats)\n })\n\n opts.headers = Object.assign(\n {},\n this.defaultHeaders,\n form.getHeaders(),\n opts.headers)\n\n const url = new URL(`/api/${endpoint}`, this.url)\n return fetch(url, opts)\n }\n\n /**\n * Upload files to neocities\n */\n upload (files, opts = {}) {\n opts = {\n statsCb: () => {},\n ...opts\n }\n const formEntries = files.map(({ name, path }) => {\n const streamCtor = (next) => next(createReadStream(path))\n streamCtor.path = path\n return {\n name,\n value: streamCtor\n }\n })\n\n return this.post('upload', formEntries, { statsCb: opts.statsCb }).then(handleResponse)\n }\n\n /**\n * delete files from your website\n */\n delete (filenames, opts = {}) {\n assert(filenames, 'filenames is a required first argument')\n assert(Array.isArray(filenames), 'filenames argument must be an array of file paths in your website')\n opts = {\n statsCb: () => {},\n ...opts\n }\n\n const formEntries = filenames.map(file => ({\n name: 'filenames[]',\n value: file\n }))\n\n return this.post('delete', formEntries, { statsCb: opts.statsCb }).then(handleResponse)\n }\n\n list (queries) {\n // args.path: Path to list\n return this.get('list', queries).then(handleResponse)\n }\n\n /**\n * info returns info on your site, or optionally on a sitename querystrign\n 
* @param {Object} args Querystring arguments to include (e.g. sitename)\n * @return {Promise} Fetch request promise\n */\n info (queries) {\n // args.sitename: sitename to get info on\n return this.get('info', queries).then(handleResponse)\n }\n\n /**\n * Deploy a directory to neocities, skipping already uploaded files and optionally cleaning orphaned files.\n * @param {String} directory The path of the directory to deploy.\n * @param {Object} opts Options object.\n * @param {Boolean} opts.cleanup Boolean to delete orphaned files nor not. Defaults to false.\n * @param {Boolean} opts.statsCb Get access to stat info before uploading is complete.\n * @return {Promise} Promise containing stats about the deploy\n */\n async deploy (directory, opts) {\n opts = {\n cleanup: false, // delete remote orphaned files\n statsCb: () => {},\n ...opts\n }\n\n const statsCb = opts.statsCb\n const totalTime = new SimpleTimer(Date.now())\n\n // INSPECTION STAGE\n statsCb({ stage: INSPECTING, status: START })\n const [localFiles, remoteFiles] = await Promise.all([\n afw.allFiles(directory, { shaper: f => f }),\n this.list().then(res => res.files)\n ])\n statsCb({ stage: INSPECTING, status: STOP })\n\n // DIFFING STAGE\n statsCb({ stage: DIFFING, status: START })\n const { filesToUpload, filesToDelete, filesSkipped } = await neocitiesLocalDiff(remoteFiles, localFiles)\n statsCb({ stage: DIFFING, status: STOP })\n\n // APPLYING STAGE\n if (filesToUpload.length === 0 && (!opts.cleanup || filesToDelete.length === 0)) {\n statsCb({ stage: APPLYING, status: SKIP })\n return stats()\n }\n\n statsCb({ stage: APPLYING, status: START })\n const work = []\n\n if (filesToUpload.length > 0) {\n const uploadJob = this.upload(filesToUpload, {\n statsCb ({ totalBytes, bytesWritten }) {\n statsCb({\n stage: APPLYING,\n status: PROGRESS,\n complete: false,\n totalBytes,\n bytesWritten,\n get progress () {\n return (this.bytesWritten / this.totalBytes) || 0\n }\n })\n }\n }).then((_) => {\n statsCb({\n stage: APPLYING,\n status: PROGRESS,\n complete: true,\n progress: 1.0\n })\n })\n work.push(uploadJob)\n }\n\n if (opts.cleanup && filesToDelete.length > 0) {\n work.push(this.delete(filesToDelete))\n }\n\n await Promise.all(work)\n statsCb({ stage: APPLYING, status: STOP })\n\n return stats()\n\n function stats () {\n totalTime.stop()\n return {\n time: totalTime.elapsed,\n filesToUpload,\n filesToDelete,\n filesSkipped\n }\n }\n }\n}\n\nmodule.exports = NeocitiesAPIClient\n","// Ported from https://github.com/mafintosh/end-of-stream with\n// permission from the author, Mathias Buus (@mafintosh).\n'use strict';\n\nvar ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE;\n\nfunction once(callback) {\n var called = false;\n return function () {\n if (called) return;\n called = true;\n\n for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {\n args[_key] = arguments[_key];\n }\n\n callback.apply(this, args);\n };\n}\n\nfunction noop() {}\n\nfunction isRequest(stream) {\n return stream.setHeader && typeof stream.abort === 'function';\n}\n\nfunction eos(stream, opts, callback) {\n if (typeof opts === 'function') return eos(stream, null, opts);\n if (!opts) opts = {};\n callback = once(callback || noop);\n var readable = opts.readable || opts.readable !== false && stream.readable;\n var writable = opts.writable || opts.writable !== false && stream.writable;\n\n var onlegacyfinish = function onlegacyfinish() {\n if (!stream.writable) onfinish();\n };\n\n var 
writableEnded = stream._writableState && stream._writableState.finished;\n\n var onfinish = function onfinish() {\n writable = false;\n writableEnded = true;\n if (!readable) callback.call(stream);\n };\n\n var readableEnded = stream._readableState && stream._readableState.endEmitted;\n\n var onend = function onend() {\n readable = false;\n readableEnded = true;\n if (!writable) callback.call(stream);\n };\n\n var onerror = function onerror(err) {\n callback.call(stream, err);\n };\n\n var onclose = function onclose() {\n var err;\n\n if (readable && !readableEnded) {\n if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n\n if (writable && !writableEnded) {\n if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();\n return callback.call(stream, err);\n }\n };\n\n var onrequest = function onrequest() {\n stream.req.on('finish', onfinish);\n };\n\n if (isRequest(stream)) {\n stream.on('complete', onfinish);\n stream.on('abort', onclose);\n if (stream.req) onrequest();else stream.on('request', onrequest);\n } else if (writable && !stream._writableState) {\n // legacy streams\n stream.on('end', onlegacyfinish);\n stream.on('close', onlegacyfinish);\n }\n\n stream.on('end', onend);\n stream.on('finish', onfinish);\n if (opts.error !== false) stream.on('error', onerror);\n stream.on('close', onclose);\n return function () {\n stream.removeListener('complete', onfinish);\n stream.removeListener('abort', onclose);\n stream.removeListener('request', onrequest);\n if (stream.req) stream.req.removeListener('finish', onfinish);\n stream.removeListener('end', onlegacyfinish);\n stream.removeListener('close', onlegacyfinish);\n stream.removeListener('finish', onfinish);\n stream.removeListener('end', onend);\n stream.removeListener('error', onerror);\n stream.removeListener('close', onclose);\n };\n}\n\nmodule.exports = eos;","module.exports = require(\"buffer\");","if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n","/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function(val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. 
val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n","module.exports =\n{\n parallel : require('./parallel.js'),\n serial : require('./serial.js'),\n serialOrdered : require('./serialOrdered.js')\n};\n","'use strict';\n\nvar stringify = require('./stringify');\nvar parse = require('./parse');\nvar formats = require('./formats');\n\nmodule.exports = {\n formats: formats,\n parse: parse,\n stringify: stringify\n};\n","var stream = require('readable-stream')\nvar eos = require('end-of-stream')\nvar inherits = require('inherits')\nvar shift = require('stream-shift')\n\nvar SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from)\n ? Buffer.from([0])\n : new Buffer([0])\n\nvar onuncork = function(self, fn) {\n if (self._corked) self.once('uncork', fn)\n else fn()\n}\n\nvar autoDestroy = function (self, err) {\n if (self._autoDestroy) self.destroy(err)\n}\n\nvar destroyer = function(self, end) {\n return function(err) {\n if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err)\n else if (end && !self._ended) self.end()\n }\n}\n\nvar end = function(ws, fn) {\n if (!ws) return fn()\n if (ws._writableState && ws._writableState.finished) return fn()\n if (ws._writableState) return ws.end(fn)\n ws.end()\n fn()\n}\n\nvar noop = function() {}\n\nvar toStreams2 = function(rs) {\n return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs)\n}\n\nvar Duplexify = function(writable, readable, opts) {\n if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts)\n stream.Duplex.call(this, opts)\n\n this._writable = null\n this._readable = null\n this._readable2 = null\n\n this._autoDestroy = !opts || opts.autoDestroy !== false\n this._forwardDestroy = !opts || opts.destroy !== false\n this._forwardEnd = !opts || opts.end !== false\n this._corked = 1 // start corked\n this._ondrain = null\n this._drained = false\n this._forwarding = false\n this._unwrite = null\n this._unread = null\n this._ended = false\n\n this.destroyed = false\n\n if (writable) this.setWritable(writable)\n if (readable) this.setReadable(readable)\n}\n\ninherits(Duplexify, stream.Duplex)\n\nDuplexify.obj = function(writable, readable, opts) {\n if (!opts) opts = {}\n opts.objectMode = true\n opts.highWaterMark = 16\n return new Duplexify(writable, readable, opts)\n}\n\nDuplexify.prototype.cork = function() {\n if (++this._corked === 1) this.emit('cork')\n}\n\nDuplexify.prototype.uncork = function() {\n if (this._corked && --this._corked === 0) this.emit('uncork')\n}\n\nDuplexify.prototype.setWritable = function(writable) {\n if (this._unwrite) this._unwrite()\n\n if (this.destroyed) {\n if (writable && writable.destroy) writable.destroy()\n return\n }\n\n if (writable === null || writable === false) {\n this.end()\n return\n }\n\n var self = this\n var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd))\n\n var ondrain = function() {\n var ondrain = self._ondrain\n self._ondrain = null\n if (ondrain) ondrain()\n }\n\n var clear = function() {\n self._writable.removeListener('drain', ondrain)\n unend()\n }\n\n if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks\n\n this._writable = writable\n this._writable.on('drain', ondrain)\n this._unwrite = clear\n\n this.uncork() // always uncork setWritable\n}\n\nDuplexify.prototype.setReadable = function(readable) {\n if (this._unread) this._unread()\n\n if (this.destroyed) {\n if (readable && readable.destroy) readable.destroy()\n return\n }\n\n if (readable === null || readable === false) {\n this.push(null)\n this.resume()\n return\n }\n\n var self = this\n var unend = eos(readable, {writable:false, readable:true}, destroyer(this))\n\n var onreadable = function() {\n self._forward()\n }\n\n var onend = function() {\n self.push(null)\n }\n\n var clear = function() {\n self._readable2.removeListener('readable', onreadable)\n self._readable2.removeListener('end', onend)\n unend()\n }\n\n this._drained = true\n this._readable = readable\n this._readable2 = readable._readableState ? 
readable : toStreams2(readable)\n this._readable2.on('readable', onreadable)\n this._readable2.on('end', onend)\n this._unread = clear\n\n this._forward()\n}\n\nDuplexify.prototype._read = function() {\n this._drained = true\n this._forward()\n}\n\nDuplexify.prototype._forward = function() {\n if (this._forwarding || !this._readable2 || !this._drained) return\n this._forwarding = true\n\n var data\n\n while (this._drained && (data = shift(this._readable2)) !== null) {\n if (this.destroyed) continue\n this._drained = this.push(data)\n }\n\n this._forwarding = false\n}\n\nDuplexify.prototype.destroy = function(err, cb) {\n if (!cb) cb = noop\n if (this.destroyed) return cb(null)\n this.destroyed = true\n\n var self = this\n process.nextTick(function() {\n self._destroy(err)\n cb(null)\n })\n}\n\nDuplexify.prototype._destroy = function(err) {\n if (err) {\n var ondrain = this._ondrain\n this._ondrain = null\n if (ondrain) ondrain(err)\n else this.emit('error', err)\n }\n\n if (this._forwardDestroy) {\n if (this._readable && this._readable.destroy) this._readable.destroy()\n if (this._writable && this._writable.destroy) this._writable.destroy()\n }\n\n this.emit('close')\n}\n\nDuplexify.prototype._write = function(data, enc, cb) {\n if (this.destroyed) return\n if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb))\n if (data === SIGNAL_FLUSH) return this._finish(cb)\n if (!this._writable) return cb()\n\n if (this._writable.write(data) === false) this._ondrain = cb\n else if (!this.destroyed) cb()\n}\n\nDuplexify.prototype._finish = function(cb) {\n var self = this\n this.emit('preend')\n onuncork(this, function() {\n end(self._forwardEnd && self._writable, function() {\n // haxx to not emit prefinish twice\n if (self._writableState.prefinished === false) self._writableState.prefinished = true\n self.emit('prefinish')\n onuncork(self, cb)\n })\n })\n}\n\nDuplexify.prototype.end = function(data, enc, cb) {\n if (typeof data === 'function') return this.end(null, null, data)\n if (typeof enc === 'function') return this.end(data, null, enc)\n this._ended = true\n if (data) this.write(data)\n if (!this._writableState.ending) this.write(SIGNAL_FLUSH)\n return stream.Writable.prototype.end.call(this, cb)\n}\n\nmodule.exports = Duplexify\n","module.exports = require(\"stream\");","module.exports = require(\"crypto\");","const prettyBytes = require('pretty-bytes')\n\n// Progress API constants\nconst START = 'start'\nconst PROGRESS = 'progress' // progress updates\nconst STOP = 'stop'\nconst SKIP = 'skip'\n\nfunction statsHandler (opts = {}) {\n let progressInterval\n const lastStats = {}\n\n return (stats) => {\n switch (stats.status) {\n case START: {\n console.log(`Starting ${stats.stage} stage...`)\n break\n }\n case STOP: {\n console.log(`Finished ${stats.stage} stage.`)\n break\n }\n case SKIP: {\n console.log(`Skipping ${stats.stage} stage.`)\n break\n }\n case PROGRESS: {\n progressHandler(stats)\n break\n }\n default: {\n }\n }\n }\n\n function progressHandler (stats) {\n Object.assign(lastStats, stats)\n if (!stats.complete && stats.progress < 1) {\n if (!progressInterval) {\n progressInterval = setInterval(logProgress, 500, lastStats)\n logProgress(lastStats)\n }\n } else {\n if (progressInterval) {\n clearInterval(progressInterval)\n logProgress(lastStats)\n }\n }\n }\n\n function logProgress (stats) {\n let logLine = `Stage ${stats.stage}: ${(stats.progress * 100).toFixed(2)}%`\n if (stats.bytesWritten != null && stats.totalBytes != null) {\n logLine = logLine + ` 
(${prettyBytes(stats.bytesWritten)} / ${prettyBytes(stats.totalBytes)})`\n }\n console.log(logLine)\n }\n}\n\nmodule.exports = statsHandler\n","var iterate = require('./lib/iterate.js')\n , initState = require('./lib/state.js')\n , terminator = require('./lib/terminator.js')\n ;\n\n// Public API\nmodule.exports = parallel;\n\n/**\n * Runs iterator over provided array elements in parallel\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction parallel(list, iterator, callback)\n{\n var state = initState(list);\n\n while (state.index < (state['keyedList'] || list).length)\n {\n iterate(list, iterator, state, function(error, result)\n {\n if (error)\n {\n callback(error, result);\n return;\n }\n\n // looks like it's the last one\n if (Object.keys(state.jobs).length === 0)\n {\n callback(null, state.results);\n return;\n }\n });\n\n state.index++;\n }\n\n return terminator.bind(state, callback);\n}\n","module.exports = require('stream');\n","\"use strict\";\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst os = __importStar(require(\"os\"));\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return (s || '')\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return (s || '')\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","var once = require('once')\nvar eos = require('end-of-stream')\nvar fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes\n\nvar noop = function () {}\nvar ancient = /^v?\\.0/.test(process.version)\n\nvar isFn = function (fn) {\n return typeof fn === 'function'\n}\n\nvar isFS = function (stream) {\n if (!ancient) return false // newer node version do not need to care about fs is a special way\n if (!fs) return 
false // browser\n return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)\n}\n\nvar isRequest = function (stream) {\n return stream.setHeader && isFn(stream.abort)\n}\n\nvar destroyer = function (stream, reading, writing, callback) {\n callback = once(callback)\n\n var closed = false\n stream.on('close', function () {\n closed = true\n })\n\n eos(stream, {readable: reading, writable: writing}, function (err) {\n if (err) return callback(err)\n closed = true\n callback()\n })\n\n var destroyed = false\n return function (err) {\n if (closed) return\n if (destroyed) return\n destroyed = true\n\n if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks\n if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want\n\n if (isFn(stream.destroy)) return stream.destroy()\n\n callback(err || new Error('stream was destroyed'))\n }\n}\n\nvar call = function (fn) {\n fn()\n}\n\nvar pipe = function (from, to) {\n return from.pipe(to)\n}\n\nvar pump = function () {\n var streams = Array.prototype.slice.call(arguments)\n var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop\n\n if (Array.isArray(streams[0])) streams = streams[0]\n if (streams.length < 2) throw new Error('pump requires two streams per minimum')\n\n var error\n var destroys = streams.map(function (stream, i) {\n var reading = i < streams.length - 1\n var writing = i > 0\n return destroyer(stream, reading, writing, function (err) {\n if (!error) error = err\n if (err) destroys.forEach(call)\n if (reading) return\n destroys.forEach(call)\n callback(error)\n })\n })\n\n return streams.reduce(pipe)\n}\n\nmodule.exports = pump\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\n result[\"default\"] = mod;\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst command_1 = require(\"./command\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable\n */\nfunction exportVariable(name, val) {\n process.env[name] = val;\n command_1.issueCommand('set-env', { name }, val);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n command_1.issueCommand('add-path', {}, inputPath);\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input. The value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. 
See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store\n */\nfunction setOutput(name, value) {\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message\n */\nfunction error(message) {\n command_1.issue('error', message);\n}\nexports.error = error;\n/**\n * Adds an warning issue\n * @param message warning issue message\n */\nfunction warning(message) {\n command_1.issue('warning', message);\n}\nexports.warning = warning;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store\n */\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\n//# sourceMappingURL=core.js.map","module.exports = shift\n\nfunction shift (stream) {\n var rs = 
stream._readableState\n if (!rs) return null\n return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs))\n}\n\nfunction getStateLength (state) {\n if (state.buffer.length) {\n // Since node 6.3.0 state.buffer is a BufferList not an array\n if (state.buffer.head) {\n return state.buffer.head.data.length\n }\n\n return state.buffer[0].length\n }\n\n return state.length\n}\n","module.exports = assert\n\nclass AssertionError extends Error {}\nAssertionError.prototype.name = 'AssertionError'\n\n/**\n * Minimal assert function\n * @param {any} t Value to check if falsy\n * @param {string=} m Optional assertion error message\n * @throws {AssertionError}\n */\nfunction assert (t, m) {\n if (!t) {\n var err = new AssertionError(m)\n if (Error.captureStackTrace) Error.captureStackTrace(err, assert)\n throw err\n }\n}\n","module.exports = defer;\n\n/**\n * Runs provided function on next iteration of the event loop\n *\n * @param {function} fn - function to run\n */\nfunction defer(fn)\n{\n var nextTick = typeof setImmediate == 'function'\n ? setImmediate\n : (\n typeof process == 'object' && typeof process.nextTick == 'function'\n ? process.nextTick\n : null\n );\n\n if (nextTick)\n {\n nextTick(fn);\n }\n else\n {\n setTimeout(fn, 0);\n }\n}\n","var util = require('util');\nvar Stream = require('stream').Stream;\nvar DelayedStream = require('delayed-stream');\n\nmodule.exports = CombinedStream;\nfunction CombinedStream() {\n this.writable = false;\n this.readable = true;\n this.dataSize = 0;\n this.maxDataSize = 2 * 1024 * 1024;\n this.pauseStreams = true;\n\n this._released = false;\n this._streams = [];\n this._currentStream = null;\n this._insideLoop = false;\n this._pendingNext = false;\n}\nutil.inherits(CombinedStream, Stream);\n\nCombinedStream.create = function(options) {\n var combinedStream = new this();\n\n options = options || {};\n for (var option in options) {\n combinedStream[option] = options[option];\n }\n\n return combinedStream;\n};\n\nCombinedStream.isStreamLike = function(stream) {\n return (typeof stream !== 'function')\n && (typeof stream !== 'string')\n && (typeof stream !== 'boolean')\n && (typeof stream !== 'number')\n && (!Buffer.isBuffer(stream));\n};\n\nCombinedStream.prototype.append = function(stream) {\n var isStreamLike = CombinedStream.isStreamLike(stream);\n\n if (isStreamLike) {\n if (!(stream instanceof DelayedStream)) {\n var newStream = DelayedStream.create(stream, {\n maxDataSize: Infinity,\n pauseStream: this.pauseStreams,\n });\n stream.on('data', this._checkDataSize.bind(this));\n stream = newStream;\n }\n\n this._handleErrors(stream);\n\n if (this.pauseStreams) {\n stream.pause();\n }\n }\n\n this._streams.push(stream);\n return this;\n};\n\nCombinedStream.prototype.pipe = function(dest, options) {\n Stream.prototype.pipe.call(this, dest, options);\n this.resume();\n return dest;\n};\n\nCombinedStream.prototype._getNext = function() {\n this._currentStream = null;\n\n if (this._insideLoop) {\n this._pendingNext = true;\n return; // defer call\n }\n\n this._insideLoop = true;\n try {\n do {\n this._pendingNext = false;\n this._realGetNext();\n } while (this._pendingNext);\n } finally {\n this._insideLoop = false;\n }\n};\n\nCombinedStream.prototype._realGetNext = function() {\n var stream = this._streams.shift();\n\n\n if (typeof stream == 'undefined') {\n this.end();\n return;\n }\n\n if (typeof stream !== 'function') {\n this._pipeNext(stream);\n return;\n }\n\n var getStream = stream;\n 
getStream(function(stream) {\n var isStreamLike = CombinedStream.isStreamLike(stream);\n if (isStreamLike) {\n stream.on('data', this._checkDataSize.bind(this));\n this._handleErrors(stream);\n }\n\n this._pipeNext(stream);\n }.bind(this));\n};\n\nCombinedStream.prototype._pipeNext = function(stream) {\n this._currentStream = stream;\n\n var isStreamLike = CombinedStream.isStreamLike(stream);\n if (isStreamLike) {\n stream.on('end', this._getNext.bind(this));\n stream.pipe(this, {end: false});\n return;\n }\n\n var value = stream;\n this.write(value);\n this._getNext();\n};\n\nCombinedStream.prototype._handleErrors = function(stream) {\n var self = this;\n stream.on('error', function(err) {\n self._emitError(err);\n });\n};\n\nCombinedStream.prototype.write = function(data) {\n this.emit('data', data);\n};\n\nCombinedStream.prototype.pause = function() {\n if (!this.pauseStreams) {\n return;\n }\n\n if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause();\n this.emit('pause');\n};\n\nCombinedStream.prototype.resume = function() {\n if (!this._released) {\n this._released = true;\n this.writable = true;\n this._getNext();\n }\n\n if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume();\n this.emit('resume');\n};\n\nCombinedStream.prototype.end = function() {\n this._reset();\n this.emit('end');\n};\n\nCombinedStream.prototype.destroy = function() {\n this._reset();\n this.emit('close');\n};\n\nCombinedStream.prototype._reset = function() {\n this.writable = false;\n this._streams = [];\n this._currentStream = null;\n};\n\nCombinedStream.prototype._checkDataSize = function() {\n this._updateDataSize();\n if (this.dataSize <= this.maxDataSize) {\n return;\n }\n\n var message =\n 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';\n this._emitError(new Error(message));\n};\n\nCombinedStream.prototype._updateDataSize = function() {\n this.dataSize = 0;\n\n var self = this;\n this._streams.forEach(function(stream) {\n if (!stream.dataSize) {\n return;\n }\n\n self.dataSize += stream.dataSize;\n });\n\n if (this._currentStream && this._currentStream.dataSize) {\n this.dataSize += this._currentStream.dataSize;\n }\n};\n\nCombinedStream.prototype._emitError = function(err) {\n this._reset();\n this.emit('error', err);\n};\n","'use strict';\n\nconst codes = {};\n\nfunction createErrorType(code, message, Base) {\n if (!Base) {\n Base = Error\n }\n\n function getMessage (arg1, arg2, arg3) {\n if (typeof message === 'string') {\n return message\n } else {\n return message(arg1, arg2, arg3)\n }\n }\n\n class NodeError extends Base {\n constructor (arg1, arg2, arg3) {\n super(getMessage(arg1, arg2, arg3));\n }\n }\n\n NodeError.prototype.name = Base.name;\n NodeError.prototype.code = code;\n\n codes[code] = NodeError;\n}\n\n// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js\nfunction oneOf(expected, thing) {\n if (Array.isArray(expected)) {\n const len = expected.length;\n expected = expected.map((i) => String(i));\n if (len > 2) {\n return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +\n expected[len - 1];\n } else if (len === 2) {\n return `one of ${thing} ${expected[0]} or ${expected[1]}`;\n } else {\n return `of ${thing} ${expected[0]}`;\n }\n } else {\n return `of ${thing} ${String(expected)}`;\n }\n}\n\n// 
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith\nfunction startsWith(str, search, pos) {\n\treturn str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith\nfunction endsWith(str, search, this_len) {\n\tif (this_len === undefined || this_len > str.length) {\n\t\tthis_len = str.length;\n\t}\n\treturn str.substring(this_len - search.length, this_len) === search;\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes\nfunction includes(str, search, start) {\n if (typeof start !== 'number') {\n start = 0;\n }\n\n if (start + search.length > str.length) {\n return false;\n } else {\n return str.indexOf(search, start) !== -1;\n }\n}\n\ncreateErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {\n return 'The value \"' + value + '\" is invalid for option \"' + name + '\"'\n}, TypeError);\ncreateErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {\n // determiner: 'must be' or 'must not be'\n let determiner;\n if (typeof expected === 'string' && startsWith(expected, 'not ')) {\n determiner = 'must not be';\n expected = expected.replace(/^not /, '');\n } else {\n determiner = 'must be';\n }\n\n let msg;\n if (endsWith(name, ' argument')) {\n // For cases like 'first argument'\n msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;\n } else {\n const type = includes(name, '.') ? 'property' : 'argument';\n msg = `The \"${name}\" ${type} ${determiner} ${oneOf(expected, 'type')}`;\n }\n\n msg += `. Received type ${typeof actual}`;\n return msg;\n}, TypeError);\ncreateErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');\ncreateErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {\n return 'The ' + name + ' method is not implemented'\n});\ncreateErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');\ncreateErrorType('ERR_STREAM_DESTROYED', function (name) {\n return 'Cannot call ' + name + ' after a stream was destroyed';\n});\ncreateErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');\ncreateErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');\ncreateErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');\ncreateErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);\ncreateErrorType('ERR_UNKNOWN_ENCODING', function (arg) {\n return 'Unknown encoding: ' + arg\n}, TypeError);\ncreateErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');\n\nmodule.exports.codes = codes;\n","// API\nmodule.exports = abort;\n\n/**\n * Aborts leftover active jobs\n *\n * @param {object} state - current state object\n */\nfunction abort(state)\n{\n Object.keys(state.jobs).forEach(clean.bind(state));\n\n // reset leftover jobs\n state.jobs = {};\n}\n\n/**\n * Cleans up leftover job by invoking abort function for the provided job id\n *\n * @this state\n * @param {string|number} key - job id to abort\n */\nfunction clean(key)\n{\n if (typeof this.jobs[key] == 'function')\n {\n this.jobs[key]();\n }\n}\n","var Stream = require('stream');\nif (process.env.READABLE_STREAM === 'disable' && Stream) {\n module.exports = Stream.Readable;\n Object.assign(module.exports, Stream);\n module.exports.Stream = Stream;\n} else {\n exports = module.exports = require('./lib/_stream_readable.js');\n exports.Stream = Stream || exports;\n exports.Readable = exports;\n exports.Writable = 
require('./lib/_stream_writable.js');\n exports.Duplex = require('./lib/_stream_duplex.js');\n exports.Transform = require('./lib/_stream_transform.js');\n exports.PassThrough = require('./lib/_stream_passthrough.js');\n exports.finished = require('./lib/internal/streams/end-of-stream.js');\n exports.pipeline = require('./lib/internal/streams/pipeline.js');\n}\n","'use strict';\n\nvar has = Object.prototype.hasOwnProperty;\nvar isArray = Array.isArray;\n\nvar hexTable = (function () {\n var array = [];\n for (var i = 0; i < 256; ++i) {\n array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase());\n }\n\n return array;\n}());\n\nvar compactQueue = function compactQueue(queue) {\n while (queue.length > 1) {\n var item = queue.pop();\n var obj = item.obj[item.prop];\n\n if (isArray(obj)) {\n var compacted = [];\n\n for (var j = 0; j < obj.length; ++j) {\n if (typeof obj[j] !== 'undefined') {\n compacted.push(obj[j]);\n }\n }\n\n item.obj[item.prop] = compacted;\n }\n }\n};\n\nvar arrayToObject = function arrayToObject(source, options) {\n var obj = options && options.plainObjects ? Object.create(null) : {};\n for (var i = 0; i < source.length; ++i) {\n if (typeof source[i] !== 'undefined') {\n obj[i] = source[i];\n }\n }\n\n return obj;\n};\n\nvar merge = function merge(target, source, options) {\n /* eslint no-param-reassign: 0 */\n if (!source) {\n return target;\n }\n\n if (typeof source !== 'object') {\n if (isArray(target)) {\n target.push(source);\n } else if (target && typeof target === 'object') {\n if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) {\n target[source] = true;\n }\n } else {\n return [target, source];\n }\n\n return target;\n }\n\n if (!target || typeof target !== 'object') {\n return [target].concat(source);\n }\n\n var mergeTarget = target;\n if (isArray(target) && !isArray(source)) {\n mergeTarget = arrayToObject(target, options);\n }\n\n if (isArray(target) && isArray(source)) {\n source.forEach(function (item, i) {\n if (has.call(target, i)) {\n var targetItem = target[i];\n if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') {\n target[i] = merge(targetItem, item, options);\n } else {\n target.push(item);\n }\n } else {\n target[i] = item;\n }\n });\n return target;\n }\n\n return Object.keys(source).reduce(function (acc, key) {\n var value = source[key];\n\n if (has.call(acc, key)) {\n acc[key] = merge(acc[key], value, options);\n } else {\n acc[key] = value;\n }\n return acc;\n }, mergeTarget);\n};\n\nvar assign = function assignSingleSource(target, source) {\n return Object.keys(source).reduce(function (acc, key) {\n acc[key] = source[key];\n return acc;\n }, target);\n};\n\nvar decode = function (str, decoder, charset) {\n var strWithoutPlus = str.replace(/\\+/g, ' ');\n if (charset === 'iso-8859-1') {\n // unescape never throws, no try...catch needed:\n return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape);\n }\n // utf-8\n try {\n return decodeURIComponent(strWithoutPlus);\n } catch (e) {\n return strWithoutPlus;\n }\n};\n\nvar encode = function encode(str, defaultEncoder, charset) {\n // This code was originally written by Brian White (mscdex) for the io.js core querystring library.\n // It has been adapted here for stricter adherence to RFC 3986\n if (str.length === 0) {\n return str;\n }\n\n var string = str;\n if (typeof str === 'symbol') {\n string = Symbol.prototype.toString.call(str);\n } else if (typeof str !== 'string') {\n string = 
String(str);\n }\n\n if (charset === 'iso-8859-1') {\n return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) {\n return '%26%23' + parseInt($0.slice(2), 16) + '%3B';\n });\n }\n\n var out = '';\n for (var i = 0; i < string.length; ++i) {\n var c = string.charCodeAt(i);\n\n if (\n c === 0x2D // -\n || c === 0x2E // .\n || c === 0x5F // _\n || c === 0x7E // ~\n || (c >= 0x30 && c <= 0x39) // 0-9\n || (c >= 0x41 && c <= 0x5A) // a-z\n || (c >= 0x61 && c <= 0x7A) // A-Z\n ) {\n out += string.charAt(i);\n continue;\n }\n\n if (c < 0x80) {\n out = out + hexTable[c];\n continue;\n }\n\n if (c < 0x800) {\n out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]);\n continue;\n }\n\n if (c < 0xD800 || c >= 0xE000) {\n out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]);\n continue;\n }\n\n i += 1;\n c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF));\n out += hexTable[0xF0 | (c >> 18)]\n + hexTable[0x80 | ((c >> 12) & 0x3F)]\n + hexTable[0x80 | ((c >> 6) & 0x3F)]\n + hexTable[0x80 | (c & 0x3F)];\n }\n\n return out;\n};\n\nvar compact = function compact(value) {\n var queue = [{ obj: { o: value }, prop: 'o' }];\n var refs = [];\n\n for (var i = 0; i < queue.length; ++i) {\n var item = queue[i];\n var obj = item.obj[item.prop];\n\n var keys = Object.keys(obj);\n for (var j = 0; j < keys.length; ++j) {\n var key = keys[j];\n var val = obj[key];\n if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) {\n queue.push({ obj: obj, prop: key });\n refs.push(val);\n }\n }\n }\n\n compactQueue(queue);\n\n return value;\n};\n\nvar isRegExp = function isRegExp(obj) {\n return Object.prototype.toString.call(obj) === '[object RegExp]';\n};\n\nvar isBuffer = function isBuffer(obj) {\n if (!obj || typeof obj !== 'object') {\n return false;\n }\n\n return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj));\n};\n\nvar combine = function combine(a, b) {\n return [].concat(a, b);\n};\n\nmodule.exports = {\n arrayToObject: arrayToObject,\n assign: assign,\n combine: combine,\n compact: compact,\n decode: decode,\n encode: encode,\n isBuffer: isBuffer,\n isRegExp: isRegExp,\n merge: merge\n};\n","'use strict';\n\nconst BYTE_UNITS = [\n\t'B',\n\t'kB',\n\t'MB',\n\t'GB',\n\t'TB',\n\t'PB',\n\t'EB',\n\t'ZB',\n\t'YB'\n];\n\nconst BIT_UNITS = [\n\t'b',\n\t'kbit',\n\t'Mbit',\n\t'Gbit',\n\t'Tbit',\n\t'Pbit',\n\t'Ebit',\n\t'Zbit',\n\t'Ybit'\n];\n\n/*\nFormats the given number using `Number#toLocaleString`.\n- If locale is a string, the value is expected to be a locale-key (for example: `de`).\n- If locale is true, the system default locale is used for translation.\n- If no value for locale is specified, the number is returned unmodified.\n*/\nconst toLocaleString = (number, locale) => {\n\tlet result = number;\n\tif (typeof locale === 'string') {\n\t\tresult = number.toLocaleString(locale);\n\t} else if (locale === true) {\n\t\tresult = number.toLocaleString();\n\t}\n\n\treturn result;\n};\n\nmodule.exports = (number, options) => {\n\tif (!Number.isFinite(number)) {\n\t\tthrow new TypeError(`Expected a finite number, got ${typeof number}: ${number}`);\n\t}\n\n\toptions = Object.assign({bits: false}, options);\n\tconst UNITS = options.bits ? BIT_UNITS : BYTE_UNITS;\n\n\tif (options.signed && number === 0) {\n\t\treturn ' 0 ' + UNITS[0];\n\t}\n\n\tconst isNegative = number < 0;\n\tconst prefix = isNegative ? '-' : (options.signed ? 
'+' : '');\n\n\tif (isNegative) {\n\t\tnumber = -number;\n\t}\n\n\tif (number < 1) {\n\t\tconst numberString = toLocaleString(number, options.locale);\n\t\treturn prefix + numberString + ' ' + UNITS[0];\n\t}\n\n\tconst exponent = Math.min(Math.floor(Math.log10(number) / 3), UNITS.length - 1);\n\t// eslint-disable-next-line unicorn/prefer-exponentiation-operator\n\tnumber = Number((number / Math.pow(1000, exponent)).toPrecision(3));\n\tconst numberString = toLocaleString(number, options.locale);\n\n\tconst unit = UNITS[exponent];\n\n\treturn prefix + numberString + ' ' + unit;\n};\n","module.exports = require(\"http\");","module.exports = require(\"events\");","module.exports = require(\"path\");","module.exports = require(\"util\");","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\n/**/\n\nvar Buffer = require('safe-buffer').Buffer;\n/**/\n\nvar isEncoding = Buffer.isEncoding || function (encoding) {\n encoding = '' + encoding;\n switch (encoding && encoding.toLowerCase()) {\n case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':\n return true;\n default:\n return false;\n }\n};\n\nfunction _normalizeEncoding(enc) {\n if (!enc) return 'utf8';\n var retried;\n while (true) {\n switch (enc) {\n case 'utf8':\n case 'utf-8':\n return 'utf8';\n case 'ucs2':\n case 'ucs-2':\n case 'utf16le':\n case 'utf-16le':\n return 'utf16le';\n case 'latin1':\n case 'binary':\n return 'latin1';\n case 'base64':\n case 'ascii':\n case 'hex':\n return enc;\n default:\n if (retried) return; // undefined\n enc = ('' + enc).toLowerCase();\n retried = true;\n }\n }\n};\n\n// Do not cache `Buffer.isEncoding` when checking encoding names as some\n// modules monkey-patch it to support additional encodings\nfunction normalizeEncoding(enc) {\n var nenc = _normalizeEncoding(enc);\n if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);\n return nenc || enc;\n}\n\n// StringDecoder provides an interface for efficiently splitting a series of\n// buffers into a series of JS strings without breaking apart multi-byte\n// characters.\nexports.StringDecoder = StringDecoder;\nfunction StringDecoder(encoding) {\n this.encoding = normalizeEncoding(encoding);\n var nb;\n switch (this.encoding) {\n case 'utf16le':\n this.text = utf16Text;\n this.end = 
utf16End;\n nb = 4;\n break;\n case 'utf8':\n this.fillLast = utf8FillLast;\n nb = 4;\n break;\n case 'base64':\n this.text = base64Text;\n this.end = base64End;\n nb = 3;\n break;\n default:\n this.write = simpleWrite;\n this.end = simpleEnd;\n return;\n }\n this.lastNeed = 0;\n this.lastTotal = 0;\n this.lastChar = Buffer.allocUnsafe(nb);\n}\n\nStringDecoder.prototype.write = function (buf) {\n if (buf.length === 0) return '';\n var r;\n var i;\n if (this.lastNeed) {\n r = this.fillLast(buf);\n if (r === undefined) return '';\n i = this.lastNeed;\n this.lastNeed = 0;\n } else {\n i = 0;\n }\n if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);\n return r || '';\n};\n\nStringDecoder.prototype.end = utf8End;\n\n// Returns only complete characters in a Buffer\nStringDecoder.prototype.text = utf8Text;\n\n// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer\nStringDecoder.prototype.fillLast = function (buf) {\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);\n this.lastNeed -= buf.length;\n};\n\n// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a\n// continuation byte. If an invalid byte is detected, -2 is returned.\nfunction utf8CheckByte(byte) {\n if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;\n return byte >> 6 === 0x02 ? -1 : -2;\n}\n\n// Checks at most 3 bytes at the end of a Buffer in order to detect an\n// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)\n// needed to complete the UTF-8 character (if applicable) are returned.\nfunction utf8CheckIncomplete(self, buf, i) {\n var j = buf.length - 1;\n if (j < i) return 0;\n var nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 1;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) self.lastNeed = nb - 2;\n return nb;\n }\n if (--j < i || nb === -2) return 0;\n nb = utf8CheckByte(buf[j]);\n if (nb >= 0) {\n if (nb > 0) {\n if (nb === 2) nb = 0;else self.lastNeed = nb - 3;\n }\n return nb;\n }\n return 0;\n}\n\n// Validates as many continuation bytes for a multi-byte UTF-8 character as\n// needed or are available. If we see a non-continuation byte where we expect\n// one, we \"replace\" the validated continuation bytes we've seen so far with\n// a single UTF-8 replacement character ('\\ufffd'), to match v8's UTF-8 decoding\n// behavior. 
The continuation byte check is included three times in the case\n// where all of the continuation bytes for a character exist in the same buffer.\n// It is also done this way as a slight performance increase instead of using a\n// loop.\nfunction utf8CheckExtraBytes(self, buf, p) {\n if ((buf[0] & 0xC0) !== 0x80) {\n self.lastNeed = 0;\n return '\\ufffd';\n }\n if (self.lastNeed > 1 && buf.length > 1) {\n if ((buf[1] & 0xC0) !== 0x80) {\n self.lastNeed = 1;\n return '\\ufffd';\n }\n if (self.lastNeed > 2 && buf.length > 2) {\n if ((buf[2] & 0xC0) !== 0x80) {\n self.lastNeed = 2;\n return '\\ufffd';\n }\n }\n }\n}\n\n// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.\nfunction utf8FillLast(buf) {\n var p = this.lastTotal - this.lastNeed;\n var r = utf8CheckExtraBytes(this, buf, p);\n if (r !== undefined) return r;\n if (this.lastNeed <= buf.length) {\n buf.copy(this.lastChar, p, 0, this.lastNeed);\n return this.lastChar.toString(this.encoding, 0, this.lastTotal);\n }\n buf.copy(this.lastChar, p, 0, buf.length);\n this.lastNeed -= buf.length;\n}\n\n// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a\n// partial character, the character's bytes are buffered until the required\n// number of bytes are available.\nfunction utf8Text(buf, i) {\n var total = utf8CheckIncomplete(this, buf, i);\n if (!this.lastNeed) return buf.toString('utf8', i);\n this.lastTotal = total;\n var end = buf.length - (total - this.lastNeed);\n buf.copy(this.lastChar, 0, end);\n return buf.toString('utf8', i, end);\n}\n\n// For UTF-8, a replacement character is added when ending on a partial\n// character.\nfunction utf8End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) return r + '\\ufffd';\n return r;\n}\n\n// UTF-16LE typically needs two bytes per character, but even if we have an even\n// number of bytes available, we need to check if we end on a leading/high\n// surrogate. In that case, we need to wait for the next two bytes in order to\n// decode the last character properly.\nfunction utf16Text(buf, i) {\n if ((buf.length - i) % 2 === 0) {\n var r = buf.toString('utf16le', i);\n if (r) {\n var c = r.charCodeAt(r.length - 1);\n if (c >= 0xD800 && c <= 0xDBFF) {\n this.lastNeed = 2;\n this.lastTotal = 4;\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n return r.slice(0, -1);\n }\n }\n return r;\n }\n this.lastNeed = 1;\n this.lastTotal = 2;\n this.lastChar[0] = buf[buf.length - 1];\n return buf.toString('utf16le', i, buf.length - 1);\n}\n\n// For UTF-16LE we do not explicitly append special replacement characters if we\n// end on a partial character, we simply let v8 handle that.\nfunction utf16End(buf) {\n var r = buf && buf.length ? this.write(buf) : '';\n if (this.lastNeed) {\n var end = this.lastTotal - this.lastNeed;\n return r + this.lastChar.toString('utf16le', 0, end);\n }\n return r;\n}\n\nfunction base64Text(buf, i) {\n var n = (buf.length - i) % 3;\n if (n === 0) return buf.toString('base64', i);\n this.lastNeed = 3 - n;\n this.lastTotal = 3;\n if (n === 1) {\n this.lastChar[0] = buf[buf.length - 1];\n } else {\n this.lastChar[0] = buf[buf.length - 2];\n this.lastChar[1] = buf[buf.length - 1];\n }\n return buf.toString('base64', i, buf.length - n);\n}\n\nfunction base64End(buf) {\n var r = buf && buf.length ? 
this.write(buf) : '';\n if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);\n return r;\n}\n\n// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)\nfunction simpleWrite(buf) {\n return buf.toString(this.encoding);\n}\n\nfunction simpleEnd(buf) {\n return buf && buf.length ? this.write(buf) : '';\n}","try {\n var util = require('util');\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = require('./inherits_browser.js');\n}\n","const fs = require('fs')\nconst path = require('path')\n\nconst { readdir, lstat } = fs.promises\n\n/**\n * pathFilter lets you filter files based on a resolved `filepath`.\n * @callback pathFilter\n * @param {String} filepath - The resolved `filepath` of the file to test for filtering.\n *\n * @return {Boolean} Return false to filter the given `filepath` and true to include it.\n */\nconst pathFilter = filepath => true\n\n/**\n * statFilter lets you filter files based on a lstat object.\n * @callback statFilter\n * @param {Object} st - A fs.Stats instance.\n *\n * @return {Boolean} Return false to filter the given `filepath` and true to include it.\n */\nconst statFilter = filepath => true\n\n/**\n * FWStats is the object that the okdistribute/folder-walker module returns by default.\n *\n * @typedef FWStats\n * @property {String} root - The filepath of the directory where the walk started.\n * @property {String} filepath - The resolved filepath.\n * @property {Object} stat - A fs.Stats instance.\n * @property {String} relname - The relative path to `root`.\n * @property {String} basename - The resolved filepath of the files containing directory.\n */\n\n/**\n * shaper lets you change the shape of the returned file data from walk-time stats.\n * @callback shaper\n * @param {FWStats} fwStats - The same status object returned from folder-walker.\n *\n * @return {*} - Whatever you want returned from the directory walk.\n */\nconst shaper = ({ root, filepath, stat, relname, basename }) => filepath\n\n/**\n * Options object\n *\n * @typedef Opts\n * @property {pathFilter} [pathFilter] - A pathFilter cb.\n * @property {statFilter} [statFilter] - A statFilter cb.\n * @property {Number} [maxDepth=Infinity] - The maximum number of folders to walk down into.\n * @property {shaper} [shaper] - A shaper cb.\n */\n\n/**\n * Create an async generator that iterates over all folders and directories inside of `dirs`.\n *\n * @async\n * @generator\n * @function\n * @public\n * @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths.\n * @param {?(Opts)} opts - Options used for the directory walk.\n *\n * @yields {Promise} - An async iterator that returns anything.\n */\nasync function * asyncFolderWalker (dirs, opts) {\n opts = Object.assign({\n fs,\n pathFilter,\n statFilter,\n maxDepth: Infinity,\n shaper\n }, opts)\n\n const roots = [dirs].flat().filter(opts.pathFilter)\n const pending = []\n\n while (roots.length) {\n const root = roots.shift()\n pending.push(root)\n\n while (pending.length) {\n const current = pending.shift()\n if (typeof current === 'undefined') continue\n const st = await lstat(current)\n if ((!st.isDirectory() || depthLimiter(current, root, opts.maxDepth)) && opts.statFilter(st)) {\n yield opts.shaper(fwShape(root, current, st))\n continue\n }\n\n const files = await readdir(current)\n files.sort()\n\n for (const file of files) {\n var 
next = path.join(current, file)\n if (opts.pathFilter(next)) pending.unshift(next)\n }\n if (current === root || !opts.statFilter(st)) continue\n else yield opts.shaper(fwShape(root, current, st))\n }\n }\n}\n\n/**\n * Generates the same shape as the folder-walker module.\n *\n * @function\n * @private\n * @param {String} root - Root filepath.\n * @param {String} name - Target filepath.\n * @param {Object} st - fs.Stat object.\n *\n * @return {FWStats} - Folder walker object.\n */\nfunction fwShape (root, name, st) {\n return {\n root: root,\n filepath: name,\n stat: st,\n relname: root === name ? path.basename(name) : path.relative(root, name),\n basename: path.basename(name)\n }\n}\n\n/**\n * Test if we are at maximum directory depth.\n *\n * @private\n * @function\n * @param {String} filePath - The resolved path of the target fille.\n * @param {String} relativeTo - The root directory of the current walk.\n * @param {Number} maxDepth - The maximum number of folders to descend into.\n *\n * @returns {Boolean} - Return true to signal stop descending.\n */\nfunction depthLimiter (filePath, relativeTo, maxDepth) {\n if (maxDepth === Infinity) return false\n const rootDepth = relativeTo.split(path.sep).length\n const fileDepth = filePath.split(path.sep).length\n return fileDepth - rootDepth > maxDepth\n}\n\n/**\n * Async iterable collector\n *\n * @async\n * @function\n * @private\n */\nasync function all (iterator) {\n const collect = []\n\n for await (const result of iterator) {\n collect.push(result)\n }\n\n return collect\n}\n\n/**\n * allFiles gives you all files from the directory walk as an array.\n *\n * @async\n * @function\n * @public\n * @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths.\n * @param {?(Opts)} opts - Options used for the directory walk.\n *\n * @returns {Promise} - An async iterator that returns anything.\n */\nasync function allFiles (...args) {\n return all(asyncFolderWalker(...args))\n}\n\nmodule.exports = {\n asyncFolderWalker,\n allFiles,\n all\n}\n","module.exports = class FixedFIFO {\n constructor (hwm) {\n if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) throw new Error('Max size for a FixedFIFO should be a power of two')\n this.buffer = new Array(hwm)\n this.mask = hwm - 1\n this.top = 0\n this.btm = 0\n this.next = null\n }\n\n push (data) {\n if (this.buffer[this.top] !== undefined) return false\n this.buffer[this.top] = data\n this.top = (this.top + 1) & this.mask\n return true\n }\n\n shift () {\n const last = this.buffer[this.btm]\n if (last === undefined) return undefined\n this.buffer[this.btm] = undefined\n this.btm = (this.btm + 1) & this.mask\n return last\n }\n\n isEmpty () {\n return this.buffer[this.btm] === undefined\n }\n}\n","const { EventEmitter } = require('events')\nconst STREAM_DESTROYED = new Error('Stream was destroyed')\n\nconst FIFO = require('fast-fifo')\n\n/* eslint-disable no-multi-spaces */\n\nconst MAX = ((1 << 25) - 1)\n\n// Shared state\nconst OPENING = 0b001\nconst DESTROYING = 0b010\nconst DESTROYED = 0b100\n\nconst NOT_OPENING = MAX ^ OPENING\n\n// Read state\nconst READ_ACTIVE = 0b0000000000001 << 3\nconst READ_PRIMARY = 0b0000000000010 << 3\nconst READ_SYNC = 0b0000000000100 << 3\nconst READ_QUEUED = 0b0000000001000 << 3\nconst READ_RESUMED = 0b0000000010000 << 3\nconst READ_PIPE_DRAINED = 0b0000000100000 << 3\nconst READ_ENDING = 0b0000001000000 << 3\nconst READ_EMIT_DATA = 0b0000010000000 << 3\nconst READ_EMIT_READABLE = 0b0000100000000 << 3\nconst READ_EMITTED_READABLE = 
0b0001000000000 << 3\nconst READ_DONE = 0b0010000000000 << 3\nconst READ_NEXT_TICK = 0b0100000000001 << 3 // also active\nconst READ_NEEDS_PUSH = 0b1000000000000 << 3\n\nconst READ_NOT_ACTIVE = MAX ^ READ_ACTIVE\nconst READ_NON_PRIMARY = MAX ^ READ_PRIMARY\nconst READ_NON_PRIMARY_AND_PUSHED = MAX ^ (READ_PRIMARY | READ_NEEDS_PUSH)\nconst READ_NOT_SYNC = MAX ^ READ_SYNC\nconst READ_PUSHED = MAX ^ READ_NEEDS_PUSH\nconst READ_PAUSED = MAX ^ READ_RESUMED\nconst READ_NOT_QUEUED = MAX ^ (READ_QUEUED | READ_EMITTED_READABLE)\nconst READ_NOT_ENDING = MAX ^ READ_ENDING\nconst READ_PIPE_NOT_DRAINED = MAX ^ (READ_RESUMED | READ_PIPE_DRAINED)\nconst READ_NOT_NEXT_TICK = MAX ^ READ_NEXT_TICK\n\n// Write state\nconst WRITE_ACTIVE = 0b000000001 << 16\nconst WRITE_PRIMARY = 0b000000010 << 16\nconst WRITE_SYNC = 0b000000100 << 16\nconst WRITE_QUEUED = 0b000001000 << 16\nconst WRITE_UNDRAINED = 0b000010000 << 16\nconst WRITE_DONE = 0b000100000 << 16\nconst WRITE_EMIT_DRAIN = 0b001000000 << 16\nconst WRITE_NEXT_TICK = 0b010000001 << 16 // also active\nconst WRITE_FINISHING = 0b100000000 << 16\n\nconst WRITE_NOT_ACTIVE = MAX ^ WRITE_ACTIVE\nconst WRITE_NOT_SYNC = MAX ^ WRITE_SYNC\nconst WRITE_NON_PRIMARY = MAX ^ WRITE_PRIMARY\nconst WRITE_NOT_FINISHING = MAX ^ WRITE_FINISHING\nconst WRITE_DRAINED = MAX ^ WRITE_UNDRAINED\nconst WRITE_NOT_QUEUED = MAX ^ WRITE_QUEUED\nconst WRITE_NOT_NEXT_TICK = MAX ^ WRITE_NEXT_TICK\n\n// Combined shared state\nconst ACTIVE = READ_ACTIVE | WRITE_ACTIVE\nconst NOT_ACTIVE = MAX ^ ACTIVE\nconst DONE = READ_DONE | WRITE_DONE\nconst DESTROY_STATUS = DESTROYING | DESTROYED\nconst OPEN_STATUS = DESTROY_STATUS | OPENING\nconst AUTO_DESTROY = DESTROY_STATUS | DONE\nconst NON_PRIMARY = WRITE_NON_PRIMARY & READ_NON_PRIMARY\n\n// Combined read state\nconst READ_PRIMARY_STATUS = OPEN_STATUS | READ_ENDING | READ_DONE\nconst READ_STATUS = OPEN_STATUS | READ_DONE | READ_QUEUED\nconst READ_FLOWING = READ_RESUMED | READ_PIPE_DRAINED\nconst READ_ACTIVE_AND_SYNC = READ_ACTIVE | READ_SYNC\nconst READ_ACTIVE_AND_SYNC_AND_NEEDS_PUSH = READ_ACTIVE | READ_SYNC | READ_NEEDS_PUSH\nconst READ_PRIMARY_AND_ACTIVE = READ_PRIMARY | READ_ACTIVE\nconst READ_ENDING_STATUS = OPEN_STATUS | READ_ENDING | READ_QUEUED\nconst READ_EMIT_READABLE_AND_QUEUED = READ_EMIT_READABLE | READ_QUEUED\nconst READ_READABLE_STATUS = OPEN_STATUS | READ_EMIT_READABLE | READ_QUEUED | READ_EMITTED_READABLE\nconst SHOULD_NOT_READ = OPEN_STATUS | READ_ACTIVE | READ_ENDING | READ_DONE | READ_NEEDS_PUSH\nconst READ_BACKPRESSURE_STATUS = DESTROY_STATUS | READ_ENDING | READ_DONE\n\n// Combined write state\nconst WRITE_PRIMARY_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_DONE\nconst WRITE_QUEUED_AND_UNDRAINED = WRITE_QUEUED | WRITE_UNDRAINED\nconst WRITE_QUEUED_AND_ACTIVE = WRITE_QUEUED | WRITE_ACTIVE\nconst WRITE_DRAIN_STATUS = WRITE_QUEUED | WRITE_UNDRAINED | OPEN_STATUS | WRITE_ACTIVE\nconst WRITE_STATUS = OPEN_STATUS | WRITE_ACTIVE | WRITE_QUEUED\nconst WRITE_PRIMARY_AND_ACTIVE = WRITE_PRIMARY | WRITE_ACTIVE\nconst WRITE_ACTIVE_AND_SYNC = WRITE_ACTIVE | WRITE_SYNC\nconst WRITE_FINISHING_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_QUEUED\nconst WRITE_BACKPRESSURE_STATUS = WRITE_UNDRAINED | DESTROY_STATUS | WRITE_FINISHING | WRITE_DONE\n\nconst asyncIterator = Symbol.asyncIterator || Symbol('asyncIterator')\n\nclass WritableState {\n constructor (stream, { highWaterMark = 16384, map = null, mapWritable, byteLength, byteLengthWritable } = {}) {\n this.stream = stream\n this.queue = new FIFO()\n this.highWaterMark = highWaterMark\n 
this.buffered = 0\n this.error = null\n this.pipeline = null\n this.byteLength = byteLengthWritable || byteLength || defaultByteLength\n this.map = mapWritable || map\n this.afterWrite = afterWrite.bind(this)\n }\n\n push (data) {\n if (this.map !== null) data = this.map(data)\n\n this.buffered += this.byteLength(data)\n this.queue.push(data)\n\n if (this.buffered < this.highWaterMark) {\n this.stream._duplexState |= WRITE_QUEUED\n return true\n }\n\n this.stream._duplexState |= WRITE_QUEUED_AND_UNDRAINED\n return false\n }\n\n shift () {\n const data = this.queue.shift()\n const stream = this.stream\n\n this.buffered -= this.byteLength(data)\n if (this.buffered === 0) stream._duplexState &= WRITE_NOT_QUEUED\n\n return data\n }\n\n end (data) {\n if (data !== undefined && data !== null) this.push(data)\n this.stream._duplexState = (this.stream._duplexState | WRITE_FINISHING) & WRITE_NON_PRIMARY\n }\n\n autoBatch (data, cb) {\n const buffer = []\n const stream = this.stream\n\n buffer.push(data)\n while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED_AND_ACTIVE) {\n buffer.push(stream._writableState.shift())\n }\n\n if ((stream._duplexState & OPEN_STATUS) !== 0) return cb(null)\n stream._writev(buffer, cb)\n }\n\n update () {\n const stream = this.stream\n\n while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED) {\n const data = this.shift()\n stream._duplexState |= WRITE_ACTIVE_AND_SYNC\n stream._write(data, this.afterWrite)\n stream._duplexState &= WRITE_NOT_SYNC\n }\n\n if ((stream._duplexState & WRITE_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary()\n }\n\n updateNonPrimary () {\n const stream = this.stream\n\n if ((stream._duplexState & WRITE_FINISHING_STATUS) === WRITE_FINISHING) {\n stream._duplexState = (stream._duplexState | WRITE_ACTIVE) & WRITE_NOT_FINISHING\n stream._final(afterFinal.bind(this))\n return\n }\n\n if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) {\n if ((stream._duplexState & ACTIVE) === 0) {\n stream._duplexState |= ACTIVE\n stream._destroy(afterDestroy.bind(this))\n }\n return\n }\n\n if ((stream._duplexState & OPEN_STATUS) === OPENING) {\n stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING\n stream._open(afterOpen.bind(this))\n }\n }\n\n updateNextTick () {\n if ((this.stream._duplexState & WRITE_NEXT_TICK) !== 0) return\n this.stream._duplexState |= WRITE_NEXT_TICK\n process.nextTick(updateWriteNT, this)\n }\n}\n\nclass ReadableState {\n constructor (stream, { highWaterMark = 16384, map = null, mapReadable, byteLength, byteLengthReadable } = {}) {\n this.stream = stream\n this.queue = new FIFO()\n this.highWaterMark = highWaterMark\n this.buffered = 0\n this.error = null\n this.pipeline = null\n this.byteLength = byteLengthReadable || byteLength || defaultByteLength\n this.map = mapReadable || map\n this.pipeTo = null\n this.afterRead = afterRead.bind(this)\n }\n\n pipe (pipeTo, cb) {\n if (this.pipeTo !== null) throw new Error('Can only pipe to one destination')\n\n this.stream._duplexState |= READ_PIPE_DRAINED\n this.pipeTo = pipeTo\n this.pipeline = new Pipeline(this.stream, pipeTo, cb || null)\n\n if (cb) this.stream.on('error', noop) // We already error handle this so supress crashes\n\n if (isStreamx(pipeTo)) {\n pipeTo._writableState.pipeline = this.pipeline\n if (cb) pipeTo.on('error', noop) // We already error handle this so supress crashes\n pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline)) // TODO: just call finished from pipeTo itself\n } else {\n const onerror = 
this.pipeline.done.bind(this.pipeline, pipeTo)\n const onclose = this.pipeline.done.bind(this.pipeline, pipeTo, null) // onclose has a weird bool arg\n pipeTo.on('error', onerror)\n pipeTo.on('close', onclose)\n pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline))\n }\n\n pipeTo.on('drain', afterDrain.bind(this))\n this.stream.emit('pipe', pipeTo)\n }\n\n push (data) {\n const stream = this.stream\n\n if (data === null) {\n this.highWaterMark = 0\n stream._duplexState = (stream._duplexState | READ_ENDING) & READ_NON_PRIMARY_AND_PUSHED\n return false\n }\n\n if (this.map !== null) data = this.map(data)\n this.buffered += this.byteLength(data)\n this.queue.push(data)\n\n stream._duplexState = (stream._duplexState | READ_QUEUED) & READ_PUSHED\n\n return this.buffered < this.highWaterMark\n }\n\n shift () {\n const data = this.queue.shift()\n\n this.buffered -= this.byteLength(data)\n if (this.buffered === 0) this.stream._duplexState &= READ_NOT_QUEUED\n return data\n }\n\n unshift (data) {\n let tail\n const pending = []\n\n while ((tail === this.queue.shift()) !== undefined) {\n pending.push(tail)\n }\n\n this.push(data)\n\n for (let i = 0; i < pending.length; i++) {\n this.queue.push(pending[i])\n }\n }\n\n read () {\n const stream = this.stream\n\n if ((stream._duplexState & READ_STATUS) === READ_QUEUED) {\n const data = this.shift()\n if ((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data)\n if (this.pipeTo !== null && this.pipeTo.write(data) === false) stream._duplexState &= READ_PIPE_NOT_DRAINED\n return data\n }\n\n return null\n }\n\n drain () {\n const stream = this.stream\n\n while ((stream._duplexState & READ_STATUS) === READ_QUEUED && (stream._duplexState & READ_FLOWING) !== 0) {\n const data = this.shift()\n if ((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data)\n if (this.pipeTo !== null && this.pipeTo.write(data) === false) stream._duplexState &= READ_PIPE_NOT_DRAINED\n }\n }\n\n update () {\n const stream = this.stream\n\n this.drain()\n\n while (this.buffered < this.highWaterMark && (stream._duplexState & SHOULD_NOT_READ) === 0) {\n stream._duplexState |= READ_ACTIVE_AND_SYNC_AND_NEEDS_PUSH\n stream._read(this.afterRead)\n stream._duplexState &= READ_NOT_SYNC\n if ((stream._duplexState & READ_ACTIVE) === 0) this.drain()\n }\n\n if ((stream._duplexState & READ_READABLE_STATUS) === READ_EMIT_READABLE_AND_QUEUED) {\n stream._duplexState |= READ_EMITTED_READABLE\n stream.emit('readable')\n }\n\n if ((stream._duplexState & READ_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary()\n }\n\n updateNonPrimary () {\n const stream = this.stream\n\n if ((stream._duplexState & READ_ENDING_STATUS) === READ_ENDING) {\n stream._duplexState = (stream._duplexState | READ_DONE) & READ_NOT_ENDING\n stream.emit('end')\n if ((stream._duplexState & AUTO_DESTROY) === DONE) stream._duplexState |= DESTROYING\n if (this.pipeTo !== null) this.pipeTo.end()\n }\n\n if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) {\n if ((stream._duplexState & ACTIVE) === 0) {\n stream._duplexState |= ACTIVE\n stream._destroy(afterDestroy.bind(this))\n }\n return\n }\n\n if ((stream._duplexState & OPEN_STATUS) === OPENING) {\n stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING\n stream._open(afterOpen.bind(this))\n }\n }\n\n updateNextTick () {\n if ((this.stream._duplexState & READ_NEXT_TICK) !== 0) return\n this.stream._duplexState |= READ_NEXT_TICK\n process.nextTick(updateReadNT, this)\n }\n}\n\nclass TransformState {\n constructor (stream) {\n 
this.data = null\n this.afterTransform = afterTransform.bind(stream)\n this.afterFinal = null\n }\n}\n\nclass Pipeline {\n constructor (src, dst, cb) {\n this.from = src\n this.to = dst\n this.afterPipe = cb\n this.error = null\n this.pipeToFinished = false\n }\n\n finished () {\n this.pipeToFinished = true\n }\n\n done (stream, err) {\n if (err) this.error = err\n\n if (stream === this.to) {\n this.to = null\n\n if (this.from !== null) {\n if ((this.from._duplexState & READ_DONE) === 0 || !this.pipeToFinished) {\n this.from.destroy(new Error('Writable stream closed prematurely'))\n }\n return\n }\n }\n\n if (stream === this.from) {\n this.from = null\n\n if (this.to !== null) {\n if ((stream._duplexState & READ_DONE) === 0) {\n this.to.destroy(new Error('Readable stream closed before ending'))\n }\n return\n }\n }\n\n if (this.afterPipe !== null) this.afterPipe(this.error)\n this.to = this.from = this.afterPipe = null\n }\n}\n\nfunction afterDrain () {\n this.stream._duplexState |= READ_PIPE_DRAINED\n if ((this.stream._duplexState & READ_ACTIVE_AND_SYNC) === 0) this.updateNextTick()\n}\n\nfunction afterFinal (err) {\n const stream = this.stream\n if (err) stream.destroy(err)\n if ((stream._duplexState & DESTROY_STATUS) === 0) {\n stream._duplexState |= WRITE_DONE\n stream.emit('finish')\n }\n if ((stream._duplexState & AUTO_DESTROY) === DONE) {\n stream._duplexState |= DESTROYING\n }\n\n stream._duplexState &= WRITE_NOT_ACTIVE\n this.update()\n}\n\nfunction afterDestroy (err) {\n const stream = this.stream\n\n if (!err && this.error !== STREAM_DESTROYED) err = this.error\n if (err) stream.emit('error', err)\n stream._duplexState |= DESTROYED\n stream.emit('close')\n\n const rs = stream._readableState\n const ws = stream._writableState\n\n if (rs !== null && rs.pipeline !== null) rs.pipeline.done(stream, err)\n if (ws !== null && ws.pipeline !== null) ws.pipeline.done(stream, err)\n}\n\nfunction afterWrite (err) {\n const stream = this.stream\n\n if (err) stream.destroy(err)\n stream._duplexState &= WRITE_NOT_ACTIVE\n\n if ((stream._duplexState & WRITE_DRAIN_STATUS) === WRITE_UNDRAINED) {\n stream._duplexState &= WRITE_DRAINED\n if ((stream._duplexState & WRITE_EMIT_DRAIN) === WRITE_EMIT_DRAIN) {\n stream.emit('drain')\n }\n }\n\n if ((stream._duplexState & WRITE_SYNC) === 0) this.update()\n}\n\nfunction afterRead (err) {\n if (err) this.stream.destroy(err)\n this.stream._duplexState &= READ_NOT_ACTIVE\n if ((this.stream._duplexState & READ_SYNC) === 0) this.update()\n}\n\nfunction updateReadNT (rs) {\n rs.stream._duplexState &= READ_NOT_NEXT_TICK\n rs.update()\n}\n\nfunction updateWriteNT (ws) {\n ws.stream._duplexState &= WRITE_NOT_NEXT_TICK\n ws.update()\n}\n\nfunction afterOpen (err) {\n const stream = this.stream\n\n if (err) stream.destroy(err)\n\n if ((stream._duplexState & DESTROYING) === 0) {\n if ((stream._duplexState & READ_PRIMARY_STATUS) === 0) stream._duplexState |= READ_PRIMARY\n if ((stream._duplexState & WRITE_PRIMARY_STATUS) === 0) stream._duplexState |= WRITE_PRIMARY\n stream.emit('open')\n }\n\n stream._duplexState &= NOT_ACTIVE\n\n if (stream._writableState !== null) {\n stream._writableState.update()\n }\n\n if (stream._readableState !== null) {\n stream._readableState.update()\n }\n}\n\nfunction afterTransform (err, data) {\n if (data !== undefined && data !== null) this.push(data)\n this._writableState.afterWrite(err)\n}\n\nclass Stream extends EventEmitter {\n constructor (opts) {\n super()\n\n this._duplexState = 0\n this._readableState = null\n 
this._writableState = null\n\n if (opts) {\n if (opts.open) this._open = opts.open\n if (opts.destroy) this._destroy = opts.destroy\n if (opts.predestroy) this._predestroy = opts.predestroy\n }\n }\n\n _open (cb) {\n cb(null)\n }\n\n _destroy (cb) {\n cb(null)\n }\n\n _predestroy () {\n // does nothing\n }\n\n get readable () {\n return this._readableState !== null\n }\n\n get writable () {\n return this._writableState !== null\n }\n\n get destroyed () {\n return (this._duplexState & DESTROYED) !== 0\n }\n\n get destroying () {\n return (this._duplexState & DESTROY_STATUS) !== 0\n }\n\n destroy (err) {\n if ((this._duplexState & DESTROY_STATUS) === 0) {\n if (!err) err = STREAM_DESTROYED\n this._duplexState = (this._duplexState | DESTROYING) & NON_PRIMARY\n this._predestroy()\n if (this._readableState !== null) {\n this._readableState.error = err\n this._readableState.updateNextTick()\n }\n if (this._writableState !== null) {\n this._writableState.error = err\n this._writableState.updateNextTick()\n }\n }\n }\n\n on (name, fn) {\n if (this._readableState !== null) {\n if (name === 'data') {\n this._duplexState |= (READ_EMIT_DATA | READ_RESUMED)\n this._readableState.updateNextTick()\n }\n if (name === 'readable') {\n this._duplexState |= READ_EMIT_READABLE\n this._readableState.updateNextTick()\n }\n }\n\n if (this._writableState !== null) {\n if (name === 'drain') {\n this._duplexState |= WRITE_EMIT_DRAIN\n this._writableState.updateNextTick()\n }\n }\n\n return super.on(name, fn)\n }\n}\n\nclass Readable extends Stream {\n constructor (opts) {\n super(opts)\n\n this._duplexState |= OPENING | WRITE_DONE\n this._readableState = new ReadableState(this, opts)\n\n if (opts) {\n if (opts.read) this._read = opts.read\n }\n }\n\n _read (cb) {\n cb(null)\n }\n\n pipe (dest, cb) {\n this._readableState.pipe(dest, cb)\n this._readableState.updateNextTick()\n return dest\n }\n\n read () {\n this._readableState.updateNextTick()\n return this._readableState.read()\n }\n\n push (data) {\n this._readableState.updateNextTick()\n return this._readableState.push(data)\n }\n\n unshift (data) {\n this._readableState.updateNextTick()\n return this._readableState.unshift(data)\n }\n\n resume () {\n this._duplexState |= READ_RESUMED\n this._readableState.updateNextTick()\n }\n\n pause () {\n this._duplexState &= READ_PAUSED\n }\n\n static _fromAsyncIterator (ite) {\n let destroy\n\n const rs = new Readable({\n read (cb) {\n ite.next().then(push).then(cb.bind(null, null)).catch(cb)\n },\n predestroy () {\n destroy = ite.return()\n },\n destroy (cb) {\n destroy.then(cb.bind(null, null)).catch(cb)\n }\n })\n\n return rs\n\n function push (data) {\n if (data.done) rs.push(null)\n else rs.push(data.value)\n }\n }\n\n static from (data) {\n if (data[asyncIterator]) return this._fromAsyncIterator(data[asyncIterator]())\n if (!Array.isArray(data)) data = data === undefined ? [] : [data]\n\n let i = 0\n return new Readable({\n read (cb) {\n this.push(i === data.length ? 
null : data[i++])\n cb(null)\n }\n })\n }\n\n static isBackpressured (rs) {\n return (rs._duplexState & READ_BACKPRESSURE_STATUS) !== 0 || rs._readableState.buffered >= rs._readableState.highWaterMark\n }\n\n [asyncIterator] () {\n const stream = this\n\n let error = null\n let ended = false\n let promiseResolve\n let promiseReject\n\n this.on('error', (err) => { error = err })\n this.on('end', () => { ended = true })\n this.on('close', () => call(error, null))\n this.on('readable', () => call(null, stream.read()))\n\n return {\n [asyncIterator] () {\n return this\n },\n next () {\n return new Promise(function (resolve, reject) {\n promiseResolve = resolve\n promiseReject = reject\n const data = stream.read()\n if (data !== null) call(null, data)\n })\n },\n return () {\n stream.destroy()\n }\n }\n\n function call (err, data) {\n if (promiseReject === null) return\n if (err) promiseReject(err)\n else if (data === null && !ended) promiseReject(STREAM_DESTROYED)\n else promiseResolve({ value: data, done: data === null })\n promiseReject = promiseResolve = null\n }\n }\n}\n\nclass Writable extends Stream {\n constructor (opts) {\n super(opts)\n\n this._duplexState |= OPENING | READ_DONE\n this._writableState = new WritableState(this, opts)\n\n if (opts) {\n if (opts.writev) this._writev = opts.writev\n if (opts.write) this._write = opts.write\n if (opts.final) this._final = opts.final\n }\n }\n\n _writev (batch, cb) {\n cb(null)\n }\n\n _write (data, cb) {\n this._writableState.autoBatch(data, cb)\n }\n\n _final (cb) {\n cb(null)\n }\n\n static isBackpressured (ws) {\n return (ws._duplexState & WRITE_BACKPRESSURE_STATUS) !== 0\n }\n\n write (data) {\n this._writableState.updateNextTick()\n return this._writableState.push(data)\n }\n\n end (data) {\n this._writableState.updateNextTick()\n this._writableState.end(data)\n }\n}\n\nclass Duplex extends Readable { // and Writable\n constructor (opts) {\n super(opts)\n\n this._duplexState = OPENING\n this._writableState = new WritableState(this, opts)\n\n if (opts) {\n if (opts.writev) this._writev = opts.writev\n if (opts.write) this._write = opts.write\n if (opts.final) this._final = opts.final\n }\n }\n\n _writev (batch, cb) {\n cb(null)\n }\n\n _write (data, cb) {\n this._writableState.autoBatch(data, cb)\n }\n\n _final (cb) {\n cb(null)\n }\n\n write (data) {\n this._writableState.updateNextTick()\n return this._writableState.push(data)\n }\n\n end (data) {\n this._writableState.updateNextTick()\n this._writableState.end(data)\n }\n}\n\nclass Transform extends Duplex {\n constructor (opts) {\n super(opts)\n this._transformState = new TransformState(this)\n\n if (opts) {\n if (opts.transform) this._transform = opts.transform\n if (opts.flush) this._flush = opts.flush\n }\n }\n\n _write (data, cb) {\n if (this._readableState.buffered >= this._readableState.highWaterMark) {\n this._transformState.data = data\n } else {\n this._transform(data, this._transformState.afterTransform)\n }\n }\n\n _read (cb) {\n if (this._transformState.data !== null) {\n const data = this._transformState.data\n this._transformState.data = null\n cb(null)\n this._transform(data, this._transformState.afterTransform)\n } else {\n cb(null)\n }\n }\n\n _transform (data, cb) {\n cb(null, data)\n }\n\n _flush (cb) {\n cb(null)\n }\n\n _final (cb) {\n this._transformState.afterFinal = cb\n this._flush(transformAfterFlush.bind(this))\n }\n}\n\nfunction transformAfterFlush (err, data) {\n const cb = this._transformState.afterFinal\n if (err) return cb(err)\n if (data !== null && 
data !== undefined) this.push(data)\n this.push(null)\n cb(null)\n}\n\nfunction isStream (stream) {\n return !!stream._readableState || !!stream._writableState\n}\n\nfunction isStreamx (stream) {\n return typeof stream._duplexState === 'number' && isStream(stream)\n}\n\nfunction defaultByteLength (data) {\n return Buffer.isBuffer(data) ? data.length : 1024\n}\n\nfunction noop () {}\n\nmodule.exports = {\n isStream,\n isStreamx,\n Stream,\n Writable,\n Readable,\n Duplex,\n Transform\n}\n","var pump = require('pump')\nvar inherits = require('inherits')\nvar Duplexify = require('duplexify')\n\nvar toArray = function(args) {\n if (!args.length) return []\n return Array.isArray(args[0]) ? args[0] : Array.prototype.slice.call(args)\n}\n\nvar define = function(opts) {\n var Pumpify = function() {\n var streams = toArray(arguments)\n if (!(this instanceof Pumpify)) return new Pumpify(streams)\n Duplexify.call(this, null, null, opts)\n if (streams.length) this.setPipeline(streams)\n }\n\n inherits(Pumpify, Duplexify)\n\n Pumpify.prototype.setPipeline = function() {\n var streams = toArray(arguments)\n var self = this\n var ended = false\n var w = streams[0]\n var r = streams[streams.length-1]\n\n r = r.readable ? r : null\n w = w.writable ? w : null\n\n var onclose = function() {\n streams[0].emit('error', new Error('stream was destroyed'))\n }\n\n this.on('close', onclose)\n this.on('prefinish', function() {\n if (!ended) self.cork()\n })\n\n pump(streams, function(err) {\n self.removeListener('close', onclose)\n if (err) return self.destroy(err.message === 'premature close' ? null : err)\n ended = true\n // pump ends after the last stream is not writable *but*\n // pumpify still forwards the readable part so we need to catch errors\n // still, so reenable autoDestroy in this case\n if (self._autoDestroy === false) self._autoDestroy = true\n self.uncork()\n })\n\n if (this.destroyed) return onclose()\n this.setWritable(w)\n this.setReadable(r)\n }\n\n return Pumpify\n}\n\nmodule.exports = define({autoDestroy:false, destroy:false})\nmodule.exports.obj = define({autoDestroy: false, destroy:false, objectMode:true, highWaterMark:16})\nmodule.exports.ctor = define\n","module.exports = require(\"fs\");","var defer = require('./defer.js');\n\n// API\nmodule.exports = async;\n\n/**\n * Runs provided callback asynchronously\n * even if callback itself is not\n *\n * @param {function} callback - callback to invoke\n * @returns {function} - augmented callback\n */\nfunction async(callback)\n{\n var isAsync = false;\n\n // check if async happened\n defer(function() { isAsync = true; });\n\n return function async_callback(err, result)\n {\n if (isAsync)\n {\n callback(err, result);\n }\n else\n {\n defer(function nextTick_callback()\n {\n callback(err, result);\n });\n }\n };\n}\n","'use strict';\n\nvar utils = require('./utils');\n\nvar has = Object.prototype.hasOwnProperty;\nvar isArray = Array.isArray;\n\nvar defaults = {\n allowDots: false,\n allowPrototypes: false,\n arrayLimit: 20,\n charset: 'utf-8',\n charsetSentinel: false,\n comma: false,\n decoder: utils.decode,\n delimiter: '&',\n depth: 5,\n ignoreQueryPrefix: false,\n interpretNumericEntities: false,\n parameterLimit: 1000,\n parseArrays: true,\n plainObjects: false,\n strictNullHandling: false\n};\n\nvar interpretNumericEntities = function (str) {\n return str.replace(/&#(\\d+);/g, function ($0, numberStr) {\n return String.fromCharCode(parseInt(numberStr, 10));\n });\n};\n\n// This is what browsers will submit when the ✓ character occurs in 
an\n// application/x-www-form-urlencoded body and the encoding of the page containing\n// the form is iso-8859-1, or when the submitted form has an accept-charset\n// attribute of iso-8859-1. Presumably also with other charsets that do not contain\n// the ✓ character, such as us-ascii.\nvar isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓')\n\n// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded.\nvar charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓')\n\nvar parseValues = function parseQueryStringValues(str, options) {\n var obj = {};\n var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\\?/, '') : str;\n var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit;\n var parts = cleanStr.split(options.delimiter, limit);\n var skipIndex = -1; // Keep track of where the utf8 sentinel was found\n var i;\n\n var charset = options.charset;\n if (options.charsetSentinel) {\n for (i = 0; i < parts.length; ++i) {\n if (parts[i].indexOf('utf8=') === 0) {\n if (parts[i] === charsetSentinel) {\n charset = 'utf-8';\n } else if (parts[i] === isoSentinel) {\n charset = 'iso-8859-1';\n }\n skipIndex = i;\n i = parts.length; // The eslint settings do not allow break;\n }\n }\n }\n\n for (i = 0; i < parts.length; ++i) {\n if (i === skipIndex) {\n continue;\n }\n var part = parts[i];\n\n var bracketEqualsPos = part.indexOf(']=');\n var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1;\n\n var key, val;\n if (pos === -1) {\n key = options.decoder(part, defaults.decoder, charset, 'key');\n val = options.strictNullHandling ? null : '';\n } else {\n key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key');\n val = options.decoder(part.slice(pos + 1), defaults.decoder, charset, 'value');\n }\n\n if (val && options.interpretNumericEntities && charset === 'iso-8859-1') {\n val = interpretNumericEntities(val);\n }\n\n if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) {\n val = val.split(',');\n }\n\n if (part.indexOf('[]=') > -1) {\n val = isArray(val) ? [val] : val;\n }\n\n if (has.call(obj, key)) {\n obj[key] = utils.combine(obj[key], val);\n } else {\n obj[key] = val;\n }\n }\n\n return obj;\n};\n\nvar parseObject = function (chain, val, options) {\n var leaf = val;\n\n for (var i = chain.length - 1; i >= 0; --i) {\n var obj;\n var root = chain[i];\n\n if (root === '[]' && options.parseArrays) {\n obj = [].concat(leaf);\n } else {\n obj = options.plainObjects ? Object.create(null) : {};\n var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root;\n var index = parseInt(cleanRoot, 10);\n if (!options.parseArrays && cleanRoot === '') {\n obj = { 0: leaf };\n } else if (\n !isNaN(index)\n && root !== cleanRoot\n && String(index) === cleanRoot\n && index >= 0\n && (options.parseArrays && index <= options.arrayLimit)\n ) {\n obj = [];\n obj[index] = leaf;\n } else {\n obj[cleanRoot] = leaf;\n }\n }\n\n leaf = obj;\n }\n\n return leaf;\n};\n\nvar parseKeys = function parseQueryStringKeys(givenKey, val, options) {\n if (!givenKey) {\n return;\n }\n\n // Transform dot notation to bracket notation\n var key = options.allowDots ? givenKey.replace(/\\.([^.[]+)/g, '[$1]') : givenKey;\n\n // The regex chunks\n\n var brackets = /(\\[[^[\\]]*])/;\n var child = /(\\[[^[\\]]*])/g;\n\n // Get the parent\n\n var segment = options.depth > 0 && brackets.exec(key);\n var parent = segment ? 
key.slice(0, segment.index) : key;\n\n // Stash the parent if it exists\n\n var keys = [];\n if (parent) {\n // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties\n if (!options.plainObjects && has.call(Object.prototype, parent)) {\n if (!options.allowPrototypes) {\n return;\n }\n }\n\n keys.push(parent);\n }\n\n // Loop through children appending to the array until we hit depth\n\n var i = 0;\n while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) {\n i += 1;\n if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) {\n if (!options.allowPrototypes) {\n return;\n }\n }\n keys.push(segment[1]);\n }\n\n // If there's a remainder, just add whatever is left\n\n if (segment) {\n keys.push('[' + key.slice(segment.index) + ']');\n }\n\n return parseObject(keys, val, options);\n};\n\nvar normalizeParseOptions = function normalizeParseOptions(opts) {\n if (!opts) {\n return defaults;\n }\n\n if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') {\n throw new TypeError('Decoder has to be a function.');\n }\n\n if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') {\n throw new Error('The charset option must be either utf-8, iso-8859-1, or undefined');\n }\n var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset;\n\n return {\n allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots,\n allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes,\n arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit,\n charset: charset,\n charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel,\n comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma,\n decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder,\n delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter,\n // eslint-disable-next-line no-implicit-coercion, no-extra-parens\n depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth,\n ignoreQueryPrefix: opts.ignoreQueryPrefix === true,\n interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities,\n parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit,\n parseArrays: opts.parseArrays !== false,\n plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects,\n strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling\n };\n};\n\nmodule.exports = function (str, opts) {\n var options = normalizeParseOptions(opts);\n\n if (str === '' || str === null || typeof str === 'undefined') {\n return options.plainObjects ? Object.create(null) : {};\n }\n\n var tempObj = typeof str === 'string' ? parseValues(str, options) : str;\n var obj = options.plainObjects ? 
Object.create(null) : {};\n\n // Iterate over the keys and setup the new object\n\n var keys = Object.keys(tempObj);\n for (var i = 0; i < keys.length; ++i) {\n var key = keys[i];\n var newObj = parseKeys(key, tempObj[key], options);\n obj = utils.merge(obj, newObj, options);\n }\n\n return utils.compact(obj);\n};\n","module.exports = require(\"zlib\");","/*!\n * mime-types\n * Copyright(c) 2014 Jonathan Ong\n * Copyright(c) 2015 Douglas Christopher Wilson\n * MIT Licensed\n */\n\n'use strict'\n\n/**\n * Module dependencies.\n * @private\n */\n\nvar db = require('mime-db')\nvar extname = require('path').extname\n\n/**\n * Module variables.\n * @private\n */\n\nvar EXTRACT_TYPE_REGEXP = /^\\s*([^;\\s]*)(?:;|\\s|$)/\nvar TEXT_TYPE_REGEXP = /^text\\//i\n\n/**\n * Module exports.\n * @public\n */\n\nexports.charset = charset\nexports.charsets = { lookup: charset }\nexports.contentType = contentType\nexports.extension = extension\nexports.extensions = Object.create(null)\nexports.lookup = lookup\nexports.types = Object.create(null)\n\n// Populate the extensions/types maps\npopulateMaps(exports.extensions, exports.types)\n\n/**\n * Get the default charset for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction charset (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n var mime = match && db[match[1].toLowerCase()]\n\n if (mime && mime.charset) {\n return mime.charset\n }\n\n // default text/* to utf-8\n if (match && TEXT_TYPE_REGEXP.test(match[1])) {\n return 'UTF-8'\n }\n\n return false\n}\n\n/**\n * Create a full Content-Type header given a MIME type or extension.\n *\n * @param {string} str\n * @return {boolean|string}\n */\n\nfunction contentType (str) {\n // TODO: should this even be in this module?\n if (!str || typeof str !== 'string') {\n return false\n }\n\n var mime = str.indexOf('/') === -1\n ? exports.lookup(str)\n : str\n\n if (!mime) {\n return false\n }\n\n // TODO: use content-type or other module\n if (mime.indexOf('charset') === -1) {\n var charset = exports.charset(mime)\n if (charset) mime += '; charset=' + charset.toLowerCase()\n }\n\n return mime\n}\n\n/**\n * Get the default extension for a MIME type.\n *\n * @param {string} type\n * @return {boolean|string}\n */\n\nfunction extension (type) {\n if (!type || typeof type !== 'string') {\n return false\n }\n\n // TODO: use media-typer\n var match = EXTRACT_TYPE_REGEXP.exec(type)\n\n // get extensions\n var exts = match && exports.extensions[match[1].toLowerCase()]\n\n if (!exts || !exts.length) {\n return false\n }\n\n return exts[0]\n}\n\n/**\n * Lookup the MIME type for a file path/extension.\n *\n * @param {string} path\n * @return {boolean|string}\n */\n\nfunction lookup (path) {\n if (!path || typeof path !== 'string') {\n return false\n }\n\n // get the extension (\"ext\" or \".ext\" or full path)\n var extension = extname('x.' 
+ path)\n .toLowerCase()\n .substr(1)\n\n if (!extension) {\n return false\n }\n\n return exports.types[extension] || false\n}\n\n/**\n * Populate the extensions and types maps.\n * @private\n */\n\nfunction populateMaps (extensions, types) {\n // source preference (least -> most)\n var preference = ['nginx', 'apache', undefined, 'iana']\n\n Object.keys(db).forEach(function forEachMimeType (type) {\n var mime = db[type]\n var exts = mime.extensions\n\n if (!exts || !exts.length) {\n return\n }\n\n // mime -> extensions\n extensions[type] = exts\n\n // extension -> mime\n for (var i = 0; i < exts.length; i++) {\n var extension = exts[i]\n\n if (types[extension]) {\n var from = preference.indexOf(db[types[extension]].source)\n var to = preference.indexOf(mime.source)\n\n if (types[extension] !== 'application/octet-stream' &&\n (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) {\n // skip the remapping\n continue\n }\n }\n\n // set the extension -> mime\n types[extension] = type\n }\n })\n}\n","async function handleResponse (response) {\n const contentType = response.headers.get('Content-Type');\n const isJSON = contentType && contentType.match(/json/);\n const data = isJSON ? await response.json() : await response.text();\n if (response.ok) return data;\n else {\n return isJSON\n ? Promise.reject(new JSONHTTPError(response, data))\n : Promise.reject(new TextHTTPError(response, data));\n }\n}\n\nclass HTTPError extends Error {\n constructor (response) {\n super(response.statusText);\n this.name = this.constructor.name;\n if (typeof Error.captureStackTrace === 'function') {\n Error.captureStackTrace(this, this.constructor);\n } else {\n this.stack = new Error(response.statusText).stack;\n }\n this.status = response.status;\n }\n}\n\nclass TextHTTPError extends HTTPError {\n constructor (response, data) {\n super(response);\n this.data = data;\n }\n}\n\nclass JSONHTTPError extends HTTPError {\n constructor (response, json) {\n super(response);\n this.json = json;\n }\n}\n\nmodule.exports = {\n handleResponse,\n HTTPError,\n TextHTTPError,\n JSONHTTPError\n};\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n// a duplex stream is just a stream that is both readable and writable.\n// Since JS doesn't have multiple prototypal inheritance, this class\n// prototypally inherits from Readable, and then parasitically from\n// Writable.\n'use strict';\n/**/\n\nvar objectKeys = Object.keys || function (obj) {\n var keys = [];\n\n for (var key in obj) {\n keys.push(key);\n }\n\n return keys;\n};\n/**/\n\n\nmodule.exports = Duplex;\n\nvar Readable = require('./_stream_readable');\n\nvar Writable = require('./_stream_writable');\n\nrequire('inherits')(Duplex, Readable);\n\n{\n // Allow the keys array to be GC'ed.\n var keys = objectKeys(Writable.prototype);\n\n for (var v = 0; v < keys.length; v++) {\n var method = keys[v];\n if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];\n }\n}\n\nfunction Duplex(options) {\n if (!(this instanceof Duplex)) return new Duplex(options);\n Readable.call(this, options);\n Writable.call(this, options);\n this.allowHalfOpen = true;\n\n if (options) {\n if (options.readable === false) this.readable = false;\n if (options.writable === false) this.writable = false;\n\n if (options.allowHalfOpen === false) {\n this.allowHalfOpen = false;\n this.once('end', onend);\n }\n }\n}\n\nObject.defineProperty(Duplex.prototype, 'writableHighWaterMark', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.highWaterMark;\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableBuffer', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState && this._writableState.getBuffer();\n }\n});\nObject.defineProperty(Duplex.prototype, 'writableLength', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n return this._writableState.length;\n }\n}); // the no-half-open enforcer\n\nfunction onend() {\n // If the writable side ended, then we're ok.\n if (this._writableState.ended) return; // no more data can be written.\n // But allow more writes to happen in this tick.\n\n process.nextTick(onEndNT, this);\n}\n\nfunction onEndNT(self) {\n self.end();\n}\n\nObject.defineProperty(Duplex.prototype, 'destroyed', {\n // making it explicit this property is not enumerable\n // because otherwise some prototype manipulation in\n // userland will fail\n enumerable: false,\n get: function get() {\n if (this._readableState === undefined || this._writableState === undefined) {\n return false;\n }\n\n return this._readableState.destroyed && this._writableState.destroyed;\n },\n set: function set(value) {\n // we ignore the value if the stream\n // has not been initialized yet\n if (this._readableState === undefined || this._writableState === undefined) {\n return;\n } // backward compatibility, the user is explicitly\n // managing destroyed\n\n\n this._readableState.destroyed = value;\n this._writableState.destroyed = value;\n }\n});","module.exports = require(\"url\");","/*!\n 
* mime-db\n * Copyright(c) 2014 Jonathan Ong\n * MIT Licensed\n */\n\n/**\n * Module exports.\n */\n\nmodule.exports = require('./db.json')\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n// a passthrough stream.\n// basically just the most minimal sort of Transform stream.\n// Every written chunk gets output as-is.\n'use strict';\n\nmodule.exports = PassThrough;\n\nvar Transform = require('./_stream_transform');\n\nrequire('inherits')(PassThrough, Transform);\n\nfunction PassThrough(options) {\n if (!(this instanceof PassThrough)) return new PassThrough(options);\n Transform.call(this, options);\n}\n\nPassThrough.prototype._transform = function (chunk, encoding, cb) {\n cb(null, chunk);\n};","var iterate = require('./lib/iterate.js')\n , initState = require('./lib/state.js')\n , terminator = require('./lib/terminator.js')\n ;\n\n// Public API\nmodule.exports = serialOrdered;\n// sorting helpers\nmodule.exports.ascending = ascending;\nmodule.exports.descending = descending;\n\n/**\n * Runs iterator over provided sorted array elements in series\n *\n * @param {array|object} list - array or object (named list) to iterate over\n * @param {function} iterator - iterator to run\n * @param {function} sortMethod - custom sort function\n * @param {function} callback - invoked when all elements processed\n * @returns {function} - jobs terminator\n */\nfunction serialOrdered(list, iterator, sortMethod, callback)\n{\n var state = initState(list, sortMethod);\n\n iterate(list, iterator, state, function iteratorHandler(error, result)\n {\n if (error)\n {\n callback(error, result);\n return;\n }\n\n state.index++;\n\n // are we there yet?\n if (state.index < (state['keyedList'] || list).length)\n {\n iterate(list, iterator, state, iteratorHandler);\n return;\n }\n\n // done here\n callback(null, state.results);\n });\n\n return terminator.bind(state, callback);\n}\n\n/*\n * -- Sort methods\n */\n\n/**\n * sort helper to sort array elements in ascending order\n *\n * @param {mixed} a - an item to compare\n * @param {mixed} b - an item to compare\n * @returns {number} - comparison result\n */\nfunction ascending(a, b)\n{\n return a < b ? -1 : a > b ? 
1 : 0;\n}\n\n/**\n * sort helper to sort array elements in descending order\n *\n * @param {mixed} a - an item to compare\n * @param {mixed} b - an item to compare\n * @returns {number} - comparison result\n */\nfunction descending(a, b)\n{\n return -1 * ascending(a, b);\n}\n","'use strict';\n\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; }\n\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }\n\nfunction _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nvar _require = require('buffer'),\n Buffer = _require.Buffer;\n\nvar _require2 = require('util'),\n inspect = _require2.inspect;\n\nvar custom = inspect && inspect.custom || 'inspect';\n\nfunction copyBuffer(src, target, offset) {\n Buffer.prototype.copy.call(src, target, offset);\n}\n\nmodule.exports =\n/*#__PURE__*/\nfunction () {\n function BufferList() {\n _classCallCheck(this, BufferList);\n\n this.head = null;\n this.tail = null;\n this.length = 0;\n }\n\n _createClass(BufferList, [{\n key: \"push\",\n value: function push(v) {\n var entry = {\n data: v,\n next: null\n };\n if (this.length > 0) this.tail.next = entry;else this.head = entry;\n this.tail = entry;\n ++this.length;\n }\n }, {\n key: \"unshift\",\n value: function unshift(v) {\n var entry = {\n data: v,\n next: this.head\n };\n if (this.length === 0) this.tail = entry;\n this.head = entry;\n ++this.length;\n }\n }, {\n key: \"shift\",\n value: function shift() {\n if (this.length === 0) return;\n var ret = this.head.data;\n if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;\n --this.length;\n return ret;\n }\n }, {\n key: \"clear\",\n value: function clear() {\n this.head = this.tail = null;\n this.length = 0;\n }\n }, {\n key: \"join\",\n value: function join(s) {\n if (this.length === 0) return '';\n var p = this.head;\n var ret = '' + p.data;\n\n while (p = p.next) {\n ret += s + p.data;\n }\n\n return ret;\n }\n }, {\n key: 
\"concat\",\n value: function concat(n) {\n if (this.length === 0) return Buffer.alloc(0);\n var ret = Buffer.allocUnsafe(n >>> 0);\n var p = this.head;\n var i = 0;\n\n while (p) {\n copyBuffer(p.data, ret, i);\n i += p.data.length;\n p = p.next;\n }\n\n return ret;\n } // Consumes a specified amount of bytes or characters from the buffered data.\n\n }, {\n key: \"consume\",\n value: function consume(n, hasStrings) {\n var ret;\n\n if (n < this.head.data.length) {\n // `slice` is the same for buffers and strings.\n ret = this.head.data.slice(0, n);\n this.head.data = this.head.data.slice(n);\n } else if (n === this.head.data.length) {\n // First chunk is a perfect match.\n ret = this.shift();\n } else {\n // Result spans more than one buffer.\n ret = hasStrings ? this._getString(n) : this._getBuffer(n);\n }\n\n return ret;\n }\n }, {\n key: \"first\",\n value: function first() {\n return this.head.data;\n } // Consumes a specified amount of characters from the buffered data.\n\n }, {\n key: \"_getString\",\n value: function _getString(n) {\n var p = this.head;\n var c = 1;\n var ret = p.data;\n n -= ret.length;\n\n while (p = p.next) {\n var str = p.data;\n var nb = n > str.length ? str.length : n;\n if (nb === str.length) ret += str;else ret += str.slice(0, n);\n n -= nb;\n\n if (n === 0) {\n if (nb === str.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = str.slice(nb);\n }\n\n break;\n }\n\n ++c;\n }\n\n this.length -= c;\n return ret;\n } // Consumes a specified amount of bytes from the buffered data.\n\n }, {\n key: \"_getBuffer\",\n value: function _getBuffer(n) {\n var ret = Buffer.allocUnsafe(n);\n var p = this.head;\n var c = 1;\n p.data.copy(ret);\n n -= p.data.length;\n\n while (p = p.next) {\n var buf = p.data;\n var nb = n > buf.length ? buf.length : n;\n buf.copy(ret, ret.length - n, 0, nb);\n n -= nb;\n\n if (n === 0) {\n if (nb === buf.length) {\n ++c;\n if (p.next) this.head = p.next;else this.head = this.tail = null;\n } else {\n this.head = p;\n p.data = buf.slice(nb);\n }\n\n break;\n }\n\n ++c;\n }\n\n this.length -= c;\n return ret;\n } // Make sure the linked list only shows the minimal necessary information.\n\n }, {\n key: custom,\n value: function value(_, options) {\n return inspect(this, _objectSpread({}, options, {\n // Only inspect one level.\n depth: 0,\n // It should not recurse.\n customInspect: false\n }));\n }\n }]);\n\n return BufferList;\n}();","'use strict';\n\nvar utils = require('./utils');\nvar formats = require('./formats');\nvar has = Object.prototype.hasOwnProperty;\n\nvar arrayPrefixGenerators = {\n brackets: function brackets(prefix) {\n return prefix + '[]';\n },\n comma: 'comma',\n indices: function indices(prefix, key) {\n return prefix + '[' + key + ']';\n },\n repeat: function repeat(prefix) {\n return prefix;\n }\n};\n\nvar isArray = Array.isArray;\nvar push = Array.prototype.push;\nvar pushToArray = function (arr, valueOrArray) {\n push.apply(arr, isArray(valueOrArray) ? 
valueOrArray : [valueOrArray]);\n};\n\nvar toISO = Date.prototype.toISOString;\n\nvar defaultFormat = formats['default'];\nvar defaults = {\n addQueryPrefix: false,\n allowDots: false,\n charset: 'utf-8',\n charsetSentinel: false,\n delimiter: '&',\n encode: true,\n encoder: utils.encode,\n encodeValuesOnly: false,\n format: defaultFormat,\n formatter: formats.formatters[defaultFormat],\n // deprecated\n indices: false,\n serializeDate: function serializeDate(date) {\n return toISO.call(date);\n },\n skipNulls: false,\n strictNullHandling: false\n};\n\nvar isNonNullishPrimitive = function isNonNullishPrimitive(v) {\n return typeof v === 'string'\n || typeof v === 'number'\n || typeof v === 'boolean'\n || typeof v === 'symbol'\n || typeof v === 'bigint';\n};\n\nvar stringify = function stringify(\n object,\n prefix,\n generateArrayPrefix,\n strictNullHandling,\n skipNulls,\n encoder,\n filter,\n sort,\n allowDots,\n serializeDate,\n formatter,\n encodeValuesOnly,\n charset\n) {\n var obj = object;\n if (typeof filter === 'function') {\n obj = filter(prefix, obj);\n } else if (obj instanceof Date) {\n obj = serializeDate(obj);\n } else if (generateArrayPrefix === 'comma' && isArray(obj)) {\n obj = obj.join(',');\n }\n\n if (obj === null) {\n if (strictNullHandling) {\n return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key') : prefix;\n }\n\n obj = '';\n }\n\n if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) {\n if (encoder) {\n var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key');\n return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value'))];\n }\n return [formatter(prefix) + '=' + formatter(String(obj))];\n }\n\n var values = [];\n\n if (typeof obj === 'undefined') {\n return values;\n }\n\n var objKeys;\n if (isArray(filter)) {\n objKeys = filter;\n } else {\n var keys = Object.keys(obj);\n objKeys = sort ? keys.sort(sort) : keys;\n }\n\n for (var i = 0; i < objKeys.length; ++i) {\n var key = objKeys[i];\n\n if (skipNulls && obj[key] === null) {\n continue;\n }\n\n if (isArray(obj)) {\n pushToArray(values, stringify(\n obj[key],\n typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix,\n generateArrayPrefix,\n strictNullHandling,\n skipNulls,\n encoder,\n filter,\n sort,\n allowDots,\n serializeDate,\n formatter,\n encodeValuesOnly,\n charset\n ));\n } else {\n pushToArray(values, stringify(\n obj[key],\n prefix + (allowDots ? '.' 
+ key : '[' + key + ']'),\n generateArrayPrefix,\n strictNullHandling,\n skipNulls,\n encoder,\n filter,\n sort,\n allowDots,\n serializeDate,\n formatter,\n encodeValuesOnly,\n charset\n ));\n }\n }\n\n return values;\n};\n\nvar normalizeStringifyOptions = function normalizeStringifyOptions(opts) {\n if (!opts) {\n return defaults;\n }\n\n if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') {\n throw new TypeError('Encoder has to be a function.');\n }\n\n var charset = opts.charset || defaults.charset;\n if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') {\n throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined');\n }\n\n var format = formats['default'];\n if (typeof opts.format !== 'undefined') {\n if (!has.call(formats.formatters, opts.format)) {\n throw new TypeError('Unknown format option provided.');\n }\n format = opts.format;\n }\n var formatter = formats.formatters[format];\n\n var filter = defaults.filter;\n if (typeof opts.filter === 'function' || isArray(opts.filter)) {\n filter = opts.filter;\n }\n\n return {\n addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix,\n allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots,\n charset: charset,\n charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel,\n delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter,\n encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode,\n encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder,\n encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly,\n filter: filter,\n formatter: formatter,\n serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate,\n skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls,\n sort: typeof opts.sort === 'function' ? opts.sort : null,\n strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling\n };\n};\n\nmodule.exports = function (object, opts) {\n var obj = object;\n var options = normalizeStringifyOptions(opts);\n\n var objKeys;\n var filter;\n\n if (typeof options.filter === 'function') {\n filter = options.filter;\n obj = filter('', obj);\n } else if (isArray(options.filter)) {\n filter = options.filter;\n objKeys = filter;\n }\n\n var keys = [];\n\n if (typeof obj !== 'object' || obj === null) {\n return '';\n }\n\n var arrayFormat;\n if (opts && opts.arrayFormat in arrayPrefixGenerators) {\n arrayFormat = opts.arrayFormat;\n } else if (opts && 'indices' in opts) {\n arrayFormat = opts.indices ? 'indices' : 'repeat';\n } else {\n arrayFormat = 'indices';\n }\n\n var generateArrayPrefix = arrayPrefixGenerators[arrayFormat];\n\n if (!objKeys) {\n objKeys = Object.keys(obj);\n }\n\n if (options.sort) {\n objKeys.sort(options.sort);\n }\n\n for (var i = 0; i < objKeys.length; ++i) {\n var key = objKeys[i];\n\n if (options.skipNulls && obj[key] === null) {\n continue;\n }\n pushToArray(keys, stringify(\n obj[key],\n key,\n generateArrayPrefix,\n options.strictNullHandling,\n options.skipNulls,\n options.encode ? 
options.encoder : null,\n options.filter,\n options.sort,\n options.allowDots,\n options.serializeDate,\n options.formatter,\n options.encodeValuesOnly,\n options.charset\n ));\n }\n\n var joined = keys.join(options.delimiter);\n var prefix = options.addQueryPrefix === true ? '?' : '';\n\n if (options.charsetSentinel) {\n if (options.charset === 'iso-8859-1') {\n // encodeURIComponent('✓'), the \"numeric entity\" representation of a checkmark\n prefix += 'utf8=%26%2310003%3B&';\n } else {\n // encodeURIComponent('✓')\n prefix += 'utf8=%E2%9C%93&';\n }\n }\n\n return joined.length > 0 ? prefix + joined : '';\n};\n","\n/**\n * For Node.js, simply re-export the core `util.deprecate` function.\n */\n\nmodule.exports = require('util').deprecate;\n","// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n// a transform stream is a readable/writable stream where you do\n// something with the data. Sometimes it's called a \"filter\",\n// but that's not a great name for it, since that implies a thing where\n// some bits pass through, and others are simply ignored. (That would\n// be a valid example of a transform, of course.)\n//\n// While the output is causally related to the input, it's not a\n// necessarily symmetric or synchronous transformation. For example,\n// a zlib stream might take multiple plain-text writes(), and then\n// emit a single compressed chunk some time in the future.\n//\n// Here's how this works:\n//\n// The Transform stream has all the aspects of the readable and writable\n// stream classes. When you write(chunk), that calls _write(chunk,cb)\n// internally, and returns false if there's a lot of pending writes\n// buffered up. When you call read(), that calls _read(n) until\n// there's enough pending readable data buffered up.\n//\n// In a transform stream, the written data is placed in a buffer. When\n// _read(n) is called, it transforms the queued up data, calling the\n// buffered _write cb's as it consumes chunks. If consuming a single\n// written chunk would result in multiple output chunks, then the first\n// outputted bit calls the readcb, and subsequent chunks just go into\n// the read buffer, and will cause it to emit 'readable' if necessary.\n//\n// This way, back-pressure is actually determined by the reading side,\n// since _read has to be called to start processing a new chunk. However,\n// a pathological inflate type of transform can cause excessive buffering\n// here. 
For example, imagine a stream where every byte of input is\n// interpreted as an integer from 0-255, and then results in that many\n// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in\n// 1kb of data being output. In this case, you could write a very small\n// amount of input, and end up with a very large amount of output. In\n// such a pathological inflating mechanism, there'd be no way to tell\n// the system to stop doing the transform. A single 4MB write could\n// cause the system to run out of memory.\n//\n// However, even in such a pathological case, only a single written chunk\n// would be consumed, and then the rest would wait (un-transformed) until\n// the results of the previous transformed chunk were consumed.\n'use strict';\n\nmodule.exports = Transform;\n\nvar _require$codes = require('../errors').codes,\n ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,\n ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,\n ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,\n ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;\n\nvar Duplex = require('./_stream_duplex');\n\nrequire('inherits')(Transform, Duplex);\n\nfunction afterTransform(er, data) {\n var ts = this._transformState;\n ts.transforming = false;\n var cb = ts.writecb;\n\n if (cb === null) {\n return this.emit('error', new ERR_MULTIPLE_CALLBACK());\n }\n\n ts.writechunk = null;\n ts.writecb = null;\n if (data != null) // single equals check for both `null` and `undefined`\n this.push(data);\n cb(er);\n var rs = this._readableState;\n rs.reading = false;\n\n if (rs.needReadable || rs.length < rs.highWaterMark) {\n this._read(rs.highWaterMark);\n }\n}\n\nfunction Transform(options) {\n if (!(this instanceof Transform)) return new Transform(options);\n Duplex.call(this, options);\n this._transformState = {\n afterTransform: afterTransform.bind(this),\n needTransform: false,\n transforming: false,\n writecb: null,\n writechunk: null,\n writeencoding: null\n }; // start out asking for a readable event once data is transformed.\n\n this._readableState.needReadable = true; // we have implemented the _read method, and done the other things\n // that Readable wants before the first _read call, so unset the\n // sync guard flag.\n\n this._readableState.sync = false;\n\n if (options) {\n if (typeof options.transform === 'function') this._transform = options.transform;\n if (typeof options.flush === 'function') this._flush = options.flush;\n } // When the writable side finishes, then flush out anything remaining.\n\n\n this.on('prefinish', prefinish);\n}\n\nfunction prefinish() {\n var _this = this;\n\n if (typeof this._flush === 'function' && !this._readableState.destroyed) {\n this._flush(function (er, data) {\n done(_this, er, data);\n });\n } else {\n done(this, null, null);\n }\n}\n\nTransform.prototype.push = function (chunk, encoding) {\n this._transformState.needTransform = false;\n return Duplex.prototype.push.call(this, chunk, encoding);\n}; // This is the part where you do stuff!\n// override this function in implementation classes.\n// 'chunk' is an input chunk.\n//\n// Call `push(newChunk)` to pass along transformed output\n// to the readable side. You may call 'push' zero or more times.\n//\n// Call `cb(err)` when you are done with this chunk. If you pass\n// an error, then that'll put the hurt on the whole operation. 
If you\n// never call cb(), then you'll never get another chunk.\n\n\nTransform.prototype._transform = function (chunk, encoding, cb) {\n cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));\n};\n\nTransform.prototype._write = function (chunk, encoding, cb) {\n var ts = this._transformState;\n ts.writecb = cb;\n ts.writechunk = chunk;\n ts.writeencoding = encoding;\n\n if (!ts.transforming) {\n var rs = this._readableState;\n if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);\n }\n}; // Doesn't matter what the args are here.\n// _transform does all the work.\n// That we got here means that the readable side wants more data.\n\n\nTransform.prototype._read = function (n) {\n var ts = this._transformState;\n\n if (ts.writechunk !== null && !ts.transforming) {\n ts.transforming = true;\n\n this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);\n } else {\n // mark that we need a transform, so that any data that comes in\n // will get processed, now that we've asked for it.\n ts.needTransform = true;\n }\n};\n\nTransform.prototype._destroy = function (err, cb) {\n Duplex.prototype._destroy.call(this, err, function (err2) {\n cb(err2);\n });\n};\n\nfunction done(stream, er, data) {\n if (er) return stream.emit('error', er);\n if (data != null) // single equals check for both `null` and `undefined`\n stream.push(data); // TODO(BridgeAR): Write a test for these two error cases\n // if there's nothing in the write buffer, then that means\n // that nothing more will ever be provided\n\n if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();\n if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();\n return stream.push(null);\n}","var CombinedStream = require('combined-stream');\nvar util = require('util');\nvar path = require('path');\nvar http = require('http');\nvar https = require('https');\nvar parseUrl = require('url').parse;\nvar fs = require('fs');\nvar mime = require('mime-types');\nvar asynckit = require('asynckit');\nvar populate = require('./populate.js');\n\n// Public API\nmodule.exports = FormData;\n\n// make it a Stream\nutil.inherits(FormData, CombinedStream);\n\n/**\n * Create readable \"multipart/form-data\" streams.\n * Can be used to submit forms\n * and file uploads to other web applications.\n *\n * @constructor\n * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream\n */\nfunction FormData(options) {\n if (!(this instanceof FormData)) {\n return new FormData(options);\n }\n\n this._overheadLength = 0;\n this._valueLength = 0;\n this._valuesToMeasure = [];\n\n CombinedStream.call(this);\n\n options = options || {};\n for (var option in options) {\n this[option] = options[option];\n }\n}\n\nFormData.LINE_BREAK = '\\r\\n';\nFormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream';\n\nFormData.prototype.append = function(field, value, options) {\n\n options = options || {};\n\n // allow filename as single option\n if (typeof options == 'string') {\n options = {filename: options};\n }\n\n var append = CombinedStream.prototype.append.bind(this);\n\n // all that streamy business can't handle numbers\n if (typeof value == 'number') {\n value = '' + value;\n }\n\n // https://github.com/felixge/node-form-data/issues/38\n if (util.isArray(value)) {\n // Please convert your array into string\n // the way web server expects it\n this._error(new Error('Arrays are not supported.'));\n return;\n }\n\n var header = 
this._multiPartHeader(field, value, options);\n var footer = this._multiPartFooter();\n\n append(header);\n append(value);\n append(footer);\n\n // pass along options.knownLength\n this._trackLength(header, value, options);\n};\n\nFormData.prototype._trackLength = function(header, value, options) {\n var valueLength = 0;\n\n // used w/ getLengthSync(), when length is known.\n // e.g. for streaming directly from a remote server,\n // w/ a known file a size, and not wanting to wait for\n // incoming file to finish to get its size.\n if (options.knownLength != null) {\n valueLength += +options.knownLength;\n } else if (Buffer.isBuffer(value)) {\n valueLength = value.length;\n } else if (typeof value === 'string') {\n valueLength = Buffer.byteLength(value);\n }\n\n this._valueLength += valueLength;\n\n // @check why add CRLF? does this account for custom/multiple CRLFs?\n this._overheadLength +=\n Buffer.byteLength(header) +\n FormData.LINE_BREAK.length;\n\n // empty or either doesn't have path or not an http response\n if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) {\n return;\n }\n\n // no need to bother with the length\n if (!options.knownLength) {\n this._valuesToMeasure.push(value);\n }\n};\n\nFormData.prototype._lengthRetriever = function(value, callback) {\n\n if (value.hasOwnProperty('fd')) {\n\n // take read range into a account\n // `end` = Infinity –> read file till the end\n //\n // TODO: Looks like there is bug in Node fs.createReadStream\n // it doesn't respect `end` options without `start` options\n // Fix it when node fixes it.\n // https://github.com/joyent/node/issues/7819\n if (value.end != undefined && value.end != Infinity && value.start != undefined) {\n\n // when end specified\n // no need to calculate range\n // inclusive, starts with 0\n callback(null, value.end + 1 - (value.start ? value.start : 0));\n\n // not that fast snoopy\n } else {\n // still need to fetch file size from fs\n fs.stat(value.path, function(err, stat) {\n\n var fileSize;\n\n if (err) {\n callback(err);\n return;\n }\n\n // update final size based on the range options\n fileSize = stat.size - (value.start ? value.start : 0);\n callback(null, fileSize);\n });\n }\n\n // or http response\n } else if (value.hasOwnProperty('httpVersion')) {\n callback(null, +value.headers['content-length']);\n\n // or request stream http://github.com/mikeal/request\n } else if (value.hasOwnProperty('httpModule')) {\n // wait till response come back\n value.on('response', function(response) {\n value.pause();\n callback(null, +response.headers['content-length']);\n });\n value.resume();\n\n // something else\n } else {\n callback('Unknown stream');\n }\n};\n\nFormData.prototype._multiPartHeader = function(field, value, options) {\n // custom header specified (as string)?\n // it becomes responsible for boundary\n // (e.g. to handle extra CRLFs on .NET servers)\n if (typeof options.header == 'string') {\n return options.header;\n }\n\n var contentDisposition = this._getContentDisposition(value, options);\n var contentType = this._getContentType(value, options);\n\n var contents = '';\n var headers = {\n // add custom disposition as third element or keep it two elements if not\n 'Content-Disposition': ['form-data', 'name=\"' + field + '\"'].concat(contentDisposition || []),\n // if no content type. 
allow it to be empty array\n 'Content-Type': [].concat(contentType || [])\n };\n\n // allow custom headers.\n if (typeof options.header == 'object') {\n populate(headers, options.header);\n }\n\n var header;\n for (var prop in headers) {\n if (!headers.hasOwnProperty(prop)) continue;\n header = headers[prop];\n\n // skip nullish headers.\n if (header == null) {\n continue;\n }\n\n // convert all headers to arrays.\n if (!Array.isArray(header)) {\n header = [header];\n }\n\n // add non-empty headers.\n if (header.length) {\n contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK;\n }\n }\n\n return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK;\n};\n\nFormData.prototype._getContentDisposition = function(value, options) {\n\n var filename\n , contentDisposition\n ;\n\n if (typeof options.filepath === 'string') {\n // custom filepath for relative paths\n filename = path.normalize(options.filepath).replace(/\\\\/g, '/');\n } else if (options.filename || value.name || value.path) {\n // custom filename take precedence\n // formidable and the browser add a name property\n // fs- and request- streams have path property\n filename = path.basename(options.filename || value.name || value.path);\n } else if (value.readable && value.hasOwnProperty('httpVersion')) {\n // or try http response\n filename = path.basename(value.client._httpMessage.path || '');\n }\n\n if (filename) {\n contentDisposition = 'filename=\"' + filename + '\"';\n }\n\n return contentDisposition;\n};\n\nFormData.prototype._getContentType = function(value, options) {\n\n // use custom content-type above all\n var contentType = options.contentType;\n\n // or try `name` from formidable, browser\n if (!contentType && value.name) {\n contentType = mime.lookup(value.name);\n }\n\n // or try `path` from fs-, request- streams\n if (!contentType && value.path) {\n contentType = mime.lookup(value.path);\n }\n\n // or if it's http-reponse\n if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) {\n contentType = value.headers['content-type'];\n }\n\n // or guess it from the filepath or filename\n if (!contentType && (options.filepath || options.filename)) {\n contentType = mime.lookup(options.filepath || options.filename);\n }\n\n // fallback to the default content type if `value` is not simple value\n if (!contentType && typeof value == 'object') {\n contentType = FormData.DEFAULT_CONTENT_TYPE;\n }\n\n return contentType;\n};\n\nFormData.prototype._multiPartFooter = function() {\n return function(next) {\n var footer = FormData.LINE_BREAK;\n\n var lastPart = (this._streams.length === 0);\n if (lastPart) {\n footer += this._lastBoundary();\n }\n\n next(footer);\n }.bind(this);\n};\n\nFormData.prototype._lastBoundary = function() {\n return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK;\n};\n\nFormData.prototype.getHeaders = function(userHeaders) {\n var header;\n var formHeaders = {\n 'content-type': 'multipart/form-data; boundary=' + this.getBoundary()\n };\n\n for (header in userHeaders) {\n if (userHeaders.hasOwnProperty(header)) {\n formHeaders[header.toLowerCase()] = userHeaders[header];\n }\n }\n\n return formHeaders;\n};\n\nFormData.prototype.getBoundary = function() {\n if (!this._boundary) {\n this._generateBoundary();\n }\n\n return this._boundary;\n};\n\nFormData.prototype.getBuffer = function() {\n var dataBuffer = new Buffer.alloc( 0 );\n var boundary = this.getBoundary();\n\n // Create the form content. 
Add Line breaks to the end of data.\n for (var i = 0, len = this._streams.length; i < len; i++) {\n if (typeof this._streams[i] !== 'function') {\n\n // Add content to the buffer.\n if(Buffer.isBuffer(this._streams[i])) {\n dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]);\n }else {\n dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]);\n }\n\n // Add break after content.\n if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) {\n dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] );\n }\n }\n }\n\n // Add the footer and return the Buffer object.\n return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] );\n};\n\nFormData.prototype._generateBoundary = function() {\n // This generates a 50 character boundary similar to those used by Firefox.\n // They are optimized for boyer-moore parsing.\n var boundary = '--------------------------';\n for (var i = 0; i < 24; i++) {\n boundary += Math.floor(Math.random() * 10).toString(16);\n }\n\n this._boundary = boundary;\n};\n\n// Note: getLengthSync DOESN'T calculate streams length\n// As workaround one can calculate file size manually\n// and add it as knownLength option\nFormData.prototype.getLengthSync = function() {\n var knownLength = this._overheadLength + this._valueLength;\n\n // Don't get confused, there are 3 \"internal\" streams for each keyval pair\n // so it basically checks if there is any value added to the form\n if (this._streams.length) {\n knownLength += this._lastBoundary().length;\n }\n\n // https://github.com/form-data/form-data/issues/40\n if (!this.hasKnownLength()) {\n // Some async length retrievers are present\n // therefore synchronous length calculation is false.\n // Please use getLength(callback) to get proper length\n this._error(new Error('Cannot calculate proper length in synchronous way.'));\n }\n\n return knownLength;\n};\n\n// Public API to check if length of added values is known\n// https://github.com/form-data/form-data/issues/196\n// https://github.com/form-data/form-data/issues/262\nFormData.prototype.hasKnownLength = function() {\n var hasKnownLength = true;\n\n if (this._valuesToMeasure.length) {\n hasKnownLength = false;\n }\n\n return hasKnownLength;\n};\n\nFormData.prototype.getLength = function(cb) {\n var knownLength = this._overheadLength + this._valueLength;\n\n if (this._streams.length) {\n knownLength += this._lastBoundary().length;\n }\n\n if (!this._valuesToMeasure.length) {\n process.nextTick(cb.bind(this, null, knownLength));\n return;\n }\n\n asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) {\n if (err) {\n cb(err);\n return;\n }\n\n values.forEach(function(length) {\n knownLength += length;\n });\n\n cb(null, knownLength);\n });\n};\n\nFormData.prototype.submit = function(params, cb) {\n var request\n , options\n , defaults = {method: 'post'}\n ;\n\n // parse provided url if it's string\n // or treat it as options object\n if (typeof params == 'string') {\n\n params = parseUrl(params);\n options = populate({\n port: params.port,\n path: params.pathname,\n host: params.hostname,\n protocol: params.protocol\n }, defaults);\n\n // use custom params\n } else {\n\n options = populate(params, defaults);\n // if no port provided use default one\n if (!options.port) {\n options.port = options.protocol == 'https:' ? 
443 : 80;\n }\n }\n\n // put that good code in getHeaders to some use\n options.headers = this.getHeaders(params.headers);\n\n // https if specified, fallback to http in any other case\n if (options.protocol == 'https:') {\n request = https.request(options);\n } else {\n request = http.request(options);\n }\n\n // get content length and fire away\n this.getLength(function(err, length) {\n if (err) {\n this._error(err);\n return;\n }\n\n // add content length\n request.setHeader('Content-Length', length);\n\n this.pipe(request);\n if (cb) {\n var onResponse;\n\n var callback = function (error, responce) {\n request.removeListener('error', callback);\n request.removeListener('response', onResponse);\n\n return cb.call(this, error, responce);\n };\n\n onResponse = callback.bind(this, null);\n\n request.on('error', callback);\n request.on('response', onResponse);\n }\n }.bind(this));\n\n return request;\n};\n\nFormData.prototype._error = function(err) {\n if (!this.error) {\n this.error = err;\n this.pause();\n this.emit('error', err);\n }\n};\n\nFormData.prototype.toString = function () {\n return '[object FormData]';\n};\n","var abort = require('./abort.js')\n , async = require('./async.js')\n ;\n\n// API\nmodule.exports = terminator;\n\n/**\n * Terminates jobs in the attached state context\n *\n * @this AsyncKitState#\n * @param {function} callback - final callback to invoke after termination\n */\nfunction terminator(callback)\n{\n if (!Object.keys(this.jobs).length)\n {\n return;\n }\n\n // fast forward iteration index\n this.index = this.size;\n\n // abort jobs\n abort(this);\n\n // send back results we have so far\n async(callback)(null, this.results);\n}\n","/**\n * Simple timer lets you record start and stop times, with an elapsed time getter.\n */\nclass SimpleTimer {\n constructor (startTime) {\n this.start = startTime || Date.now()\n this.end = null\n this.stopped = false\n }\n\n get elapsed () {\n if (this.stopped) {\n return this.end - this.start\n } else {\n return Date.now() - this.start\n }\n }\n\n stop () {\n if (this.stopped) return\n this.stopped = true\n this.end = Date.now()\n }\n\n toString () {\n return this.elapsed\n }\n\n toJSON () {\n return this.elapsed\n }\n}\n\nmodule.exports = SimpleTimer\n"],"sourceRoot":""} \ No newline at end of file diff --git a/dist/sourcemap-register.js b/dist/sourcemap-register.js new file mode 100644 index 0000000..801d431 --- /dev/null +++ b/dist/sourcemap-register.js @@ -0,0 +1,3933 @@ +module.exports = +/******/ (function(modules, runtime) { // webpackBootstrap +/******/ "use strict"; +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ __webpack_require__.ab = __dirname + "/"; +/******/ +/******/ // the startup function +/******/ 
function startup() { +/******/ // Load entry module and return exports +/******/ return __webpack_require__(645); +/******/ }; +/******/ +/******/ // run startup +/******/ return startup(); +/******/ }) +/************************************************************************/ +/******/ ({ + +/***/ 164: +/***/ (function(__unusedmodule, exports) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +exports.GREATEST_LOWER_BOUND = 1; +exports.LEAST_UPPER_BOUND = 2; + +/** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ +function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } +} + +/** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. 
+ * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ +exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; +}; + + +/***/ }), + +/***/ 215: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +var base64 = __webpack_require__(537); + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. 
+// +// Continuation +// | Sign +// | | +// V V +// 101011 + +var VLQ_BASE_SHIFT = 5; + +// binary: 100000 +var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +var VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +var VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; + +/** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ +exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; +}; + + +/***/ }), + +/***/ 226: +/***/ (function(__unusedmodule, exports) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +// It turns out that some (most?) JavaScript engines don't self-host +// `Array.prototype.sort`. This makes sense because C++ will likely remain +// faster than JS when doing raw CPU-intensive sorting. However, when using a +// custom comparator function, calling back and forth between the VM's C++ and +// JIT'd JS is rather slow *and* loses JIT type information, resulting in +// worse generated code for the comparator function than would be optimal. In +// fact, when sorting with a comparator, these costs outweigh the benefits of +// sorting in C++. By using our own JS-implemented Quick Sort (below), we get +// a ~3500ms mean speed-up in `bench/bench.html`. + +/** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. 
+ */ +function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; +} + +/** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ +function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); +} + +/** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ +function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } +} + +/** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. 
+ */ +exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); +}; + + +/***/ }), + +/***/ 282: +/***/ (function(module) { + +module.exports = require("module"); + +/***/ }), + +/***/ 284: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +var SourceMapConsumer = __webpack_require__(596).SourceMapConsumer; +var path = __webpack_require__(622); + +var fs; +try { + fs = __webpack_require__(747); + if (!fs.existsSync || !fs.readFileSync) { + // fs doesn't have all methods we need + fs = null; + } +} catch (err) { + /* nop */ +} + +var bufferFrom = __webpack_require__(650); + +// Only install once if called multiple times +var errorFormatterInstalled = false; +var uncaughtShimInstalled = false; + +// If true, the caches are reset before a stack trace formatting operation +var emptyCacheBetweenOperations = false; + +// Supports {browser, node, auto} +var environment = "auto"; + +// Maps a file path to a string containing the file contents +var fileContentsCache = {}; + +// Maps a file path to a source map for that file +var sourceMapCache = {}; + +// Regex for detecting source maps +var reSourceMap = /^data:application\/json[^,]+base64,/; + +// Priority list of retrieve handlers +var retrieveFileHandlers = []; +var retrieveMapHandlers = []; + +function isInBrowser() { + if (environment === "browser") + return true; + if (environment === "node") + return false; + return ((typeof window !== 'undefined') && (typeof XMLHttpRequest === 'function') && !(window.require && window.module && window.process && window.process.type === "renderer")); +} + +function hasGlobalProcessEventEmitter() { + return ((typeof process === 'object') && (process !== null) && (typeof process.on === 'function')); +} + +function handlerExec(list) { + return function(arg) { + for (var i = 0; i < list.length; i++) { + var ret = list[i](arg); + if (ret) { + return ret; + } + } + return null; + }; +} + +var retrieveFile = handlerExec(retrieveFileHandlers); + +retrieveFileHandlers.push(function(path) { + // Trim the path to make sure there is no extra whitespace. + path = path.trim(); + if (/^file:/.test(path)) { + // existsSync/readFileSync can't handle file protocol, but once stripped, it works + path = path.replace(/file:\/\/\/(\w:)?/, function(protocol, drive) { + return drive ? + '' : // file:///C:/dir/file -> C:/dir/file + '/'; // file:///root-dir/file -> /root-dir/file + }); + } + if (path in fileContentsCache) { + return fileContentsCache[path]; + } + + var contents = ''; + try { + if (!fs) { + // Use SJAX if we are in the browser + var xhr = new XMLHttpRequest(); + xhr.open('GET', path, /** async */ false); + xhr.send(null); + if (xhr.readyState === 4 && xhr.status === 200) { + contents = xhr.responseText; + } + } else if (fs.existsSync(path)) { + // Otherwise, use the filesystem + contents = fs.readFileSync(path, 'utf8'); + } + } catch (er) { + /* ignore any errors */ + } + + return fileContentsCache[path] = contents; +}); + +// Support URLs relative to a directory, but be careful about a protocol prefix +// in case we are in the browser (i.e. directories may start with "http://" or "file:///") +function supportRelativeURL(file, url) { + if (!file) return url; + var dir = path.dirname(file); + var match = /^\w+:\/\/[^\/]*/.exec(dir); + var protocol = match ? 
match[0] : ''; + var startPath = dir.slice(protocol.length); + if (protocol && /^\/\w\:/.test(startPath)) { + // handle file:///C:/ paths + protocol += '/'; + return protocol + path.resolve(dir.slice(protocol.length), url).replace(/\\/g, '/'); + } + return protocol + path.resolve(dir.slice(protocol.length), url); +} + +function retrieveSourceMapURL(source) { + var fileData; + + if (isInBrowser()) { + try { + var xhr = new XMLHttpRequest(); + xhr.open('GET', source, false); + xhr.send(null); + fileData = xhr.readyState === 4 ? xhr.responseText : null; + + // Support providing a sourceMappingURL via the SourceMap header + var sourceMapHeader = xhr.getResponseHeader("SourceMap") || + xhr.getResponseHeader("X-SourceMap"); + if (sourceMapHeader) { + return sourceMapHeader; + } + } catch (e) { + } + } + + // Get the URL of the source map + fileData = retrieveFile(source); + var re = /(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg; + // Keep executing the search to find the *last* sourceMappingURL to avoid + // picking up sourceMappingURLs from comments, strings, etc. + var lastMatch, match; + while (match = re.exec(fileData)) lastMatch = match; + if (!lastMatch) return null; + return lastMatch[1]; +}; + +// Can be overridden by the retrieveSourceMap option to install. Takes a +// generated source filename; returns a {map, optional url} object, or null if +// there is no source map. The map field may be either a string or the parsed +// JSON object (ie, it must be a valid argument to the SourceMapConsumer +// constructor). +var retrieveSourceMap = handlerExec(retrieveMapHandlers); +retrieveMapHandlers.push(function(source) { + var sourceMappingURL = retrieveSourceMapURL(source); + if (!sourceMappingURL) return null; + + // Read the contents of the source map + var sourceMapData; + if (reSourceMap.test(sourceMappingURL)) { + // Support source map URL as a data url + var rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(',') + 1); + sourceMapData = bufferFrom(rawData, "base64").toString(); + sourceMappingURL = source; + } else { + // Support source map URLs relative to the source URL + sourceMappingURL = supportRelativeURL(source, sourceMappingURL); + sourceMapData = retrieveFile(sourceMappingURL); + } + + if (!sourceMapData) { + return null; + } + + return { + url: sourceMappingURL, + map: sourceMapData + }; +}); + +function mapSourcePosition(position) { + var sourceMap = sourceMapCache[position.source]; + if (!sourceMap) { + // Call the (overrideable) retrieveSourceMap function to get the source map. + var urlAndMap = retrieveSourceMap(position.source); + if (urlAndMap) { + sourceMap = sourceMapCache[position.source] = { + url: urlAndMap.url, + map: new SourceMapConsumer(urlAndMap.map) + }; + + // Load all sources stored inline with the source map into the file cache + // to pretend like they are already loaded. They may not exist on disk. 
+ if (sourceMap.map.sourcesContent) { + sourceMap.map.sources.forEach(function(source, i) { + var contents = sourceMap.map.sourcesContent[i]; + if (contents) { + var url = supportRelativeURL(sourceMap.url, source); + fileContentsCache[url] = contents; + } + }); + } + } else { + sourceMap = sourceMapCache[position.source] = { + url: null, + map: null + }; + } + } + + // Resolve the source URL relative to the URL of the source map + if (sourceMap && sourceMap.map && typeof sourceMap.map.originalPositionFor === 'function') { + var originalPosition = sourceMap.map.originalPositionFor(position); + + // Only return the original position if a matching line was found. If no + // matching line is found then we return position instead, which will cause + // the stack trace to print the path and line for the compiled file. It is + // better to give a precise location in the compiled file than a vague + // location in the original file. + if (originalPosition.source !== null) { + originalPosition.source = supportRelativeURL( + sourceMap.url, originalPosition.source); + return originalPosition; + } + } + + return position; +} + +// Parses code generated by FormatEvalOrigin(), a function inside V8: +// https://code.google.com/p/v8/source/browse/trunk/src/messages.js +function mapEvalOrigin(origin) { + // Most eval() calls are in this format + var match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin); + if (match) { + var position = mapSourcePosition({ + source: match[2], + line: +match[3], + column: match[4] - 1 + }); + return 'eval at ' + match[1] + ' (' + position.source + ':' + + position.line + ':' + (position.column + 1) + ')'; + } + + // Parse nested eval() calls using recursion + match = /^eval at ([^(]+) \((.+)\)$/.exec(origin); + if (match) { + return 'eval at ' + match[1] + ' (' + mapEvalOrigin(match[2]) + ')'; + } + + // Make sure we still return useful information if we didn't find anything + return origin; +} + +// This is copied almost verbatim from the V8 source code at +// https://code.google.com/p/v8/source/browse/trunk/src/messages.js. The +// implementation of wrapCallSite() used to just forward to the actual source +// code of CallSite.prototype.toString but unfortunately a new release of V8 +// did something to the prototype chain and broke the shim. The only fix I +// could find was copy/paste. +function CallSiteToString() { + var fileName; + var fileLocation = ""; + if (this.isNative()) { + fileLocation = "native"; + } else { + fileName = this.getScriptNameOrSourceURL(); + if (!fileName && this.isEval()) { + fileLocation = this.getEvalOrigin(); + fileLocation += ", "; // Expecting source position to follow. + } + + if (fileName) { + fileLocation += fileName; + } else { + // Source code does not originate from a file and is not native, but we + // can still get the source position inside the source string, e.g. in + // an eval string. 
+ fileLocation += ""; + } + var lineNumber = this.getLineNumber(); + if (lineNumber != null) { + fileLocation += ":" + lineNumber; + var columnNumber = this.getColumnNumber(); + if (columnNumber) { + fileLocation += ":" + columnNumber; + } + } + } + + var line = ""; + var functionName = this.getFunctionName(); + var addSuffix = true; + var isConstructor = this.isConstructor(); + var isMethodCall = !(this.isToplevel() || isConstructor); + if (isMethodCall) { + var typeName = this.getTypeName(); + // Fixes shim to be backward compatable with Node v0 to v4 + if (typeName === "[object Object]") { + typeName = "null"; + } + var methodName = this.getMethodName(); + if (functionName) { + if (typeName && functionName.indexOf(typeName) != 0) { + line += typeName + "."; + } + line += functionName; + if (methodName && functionName.indexOf("." + methodName) != functionName.length - methodName.length - 1) { + line += " [as " + methodName + "]"; + } + } else { + line += typeName + "." + (methodName || ""); + } + } else if (isConstructor) { + line += "new " + (functionName || ""); + } else if (functionName) { + line += functionName; + } else { + line += fileLocation; + addSuffix = false; + } + if (addSuffix) { + line += " (" + fileLocation + ")"; + } + return line; +} + +function cloneCallSite(frame) { + var object = {}; + Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach(function(name) { + object[name] = /^(?:is|get)/.test(name) ? function() { return frame[name].call(frame); } : frame[name]; + }); + object.toString = CallSiteToString; + return object; +} + +function wrapCallSite(frame, state) { + // provides interface backward compatibility + if (state === undefined) { + state = { nextPosition: null, curPosition: null } + } + if(frame.isNative()) { + state.curPosition = null; + return frame; + } + + // Most call sites will return the source file from getFileName(), but code + // passed to eval() ending in "//# sourceURL=..." will return the source file + // from getScriptNameOrSourceURL() instead + var source = frame.getFileName() || frame.getScriptNameOrSourceURL(); + if (source) { + var line = frame.getLineNumber(); + var column = frame.getColumnNumber() - 1; + + // Fix position in Node where some (internal) code is prepended. + // See https://github.com/evanw/node-source-map-support/issues/36 + // Header removed in node at ^10.16 || >=11.11.0 + // v11 is not an LTS candidate, we can just test the one version with it. + // Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11 + var noHeader = /^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/; + var headerLength = noHeader.test(process.version) ? 
0 : 62; + if (line === 1 && column > headerLength && !isInBrowser() && !frame.isEval()) { + column -= headerLength; + } + + var position = mapSourcePosition({ + source: source, + line: line, + column: column + }); + state.curPosition = position; + frame = cloneCallSite(frame); + var originalFunctionName = frame.getFunctionName; + frame.getFunctionName = function() { + if (state.nextPosition == null) { + return originalFunctionName(); + } + return state.nextPosition.name || originalFunctionName(); + }; + frame.getFileName = function() { return position.source; }; + frame.getLineNumber = function() { return position.line; }; + frame.getColumnNumber = function() { return position.column + 1; }; + frame.getScriptNameOrSourceURL = function() { return position.source; }; + return frame; + } + + // Code called using eval() needs special handling + var origin = frame.isEval() && frame.getEvalOrigin(); + if (origin) { + origin = mapEvalOrigin(origin); + frame = cloneCallSite(frame); + frame.getEvalOrigin = function() { return origin; }; + return frame; + } + + // If we get here then we were unable to change the source position + return frame; +} + +// This function is part of the V8 stack trace API, for more info see: +// https://v8.dev/docs/stack-trace-api +function prepareStackTrace(error, stack) { + if (emptyCacheBetweenOperations) { + fileContentsCache = {}; + sourceMapCache = {}; + } + + var name = error.name || 'Error'; + var message = error.message || ''; + var errorString = name + ": " + message; + + var state = { nextPosition: null, curPosition: null }; + var processedStack = []; + for (var i = stack.length - 1; i >= 0; i--) { + processedStack.push('\n at ' + wrapCallSite(stack[i], state)); + state.nextPosition = state.curPosition; + } + state.curPosition = state.nextPosition = null; + return errorString + processedStack.reverse().join(''); +} + +// Generate position and snippet of original source with pointer +function getErrorSource(error) { + var match = /\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(error.stack); + if (match) { + var source = match[1]; + var line = +match[2]; + var column = +match[3]; + + // Support the inline sourceContents inside the source map + var contents = fileContentsCache[source]; + + // Support files on disk + if (!contents && fs && fs.existsSync(source)) { + try { + contents = fs.readFileSync(source, 'utf8'); + } catch (er) { + contents = ''; + } + } + + // Format the line from the original source code like node does + if (contents) { + var code = contents.split(/(?:\r\n|\r|\n)/)[line - 1]; + if (code) { + return source + ':' + line + '\n' + code + '\n' + + new Array(column).join(' ') + '^'; + } + } + } + return null; +} + +function printErrorAndExit (error) { + var source = getErrorSource(error); + + // Ensure error is printed synchronously and not truncated + if (process.stderr._handle && process.stderr._handle.setBlocking) { + process.stderr._handle.setBlocking(true); + } + + if (source) { + console.error(); + console.error(source); + } + + console.error(error.stack); + process.exit(1); +} + +function shimEmitUncaughtException () { + var origEmit = process.emit; + + process.emit = function (type) { + if (type === 'uncaughtException') { + var hasStack = (arguments[1] && arguments[1].stack); + var hasListeners = (this.listeners(type).length > 0); + + if (hasStack && !hasListeners) { + return printErrorAndExit(arguments[1]); + } + } + + return origEmit.apply(this, arguments); + }; +} + +var originalRetrieveFileHandlers = retrieveFileHandlers.slice(0); +var 
originalRetrieveMapHandlers = retrieveMapHandlers.slice(0); + +exports.wrapCallSite = wrapCallSite; +exports.getErrorSource = getErrorSource; +exports.mapSourcePosition = mapSourcePosition; +exports.retrieveSourceMap = retrieveSourceMap; + +exports.install = function(options) { + options = options || {}; + + if (options.environment) { + environment = options.environment; + if (["node", "browser", "auto"].indexOf(environment) === -1) { + throw new Error("environment " + environment + " was unknown. Available options are {auto, browser, node}") + } + } + + // Allow sources to be found by methods other than reading the files + // directly from disk. + if (options.retrieveFile) { + if (options.overrideRetrieveFile) { + retrieveFileHandlers.length = 0; + } + + retrieveFileHandlers.unshift(options.retrieveFile); + } + + // Allow source maps to be found by methods other than reading the files + // directly from disk. + if (options.retrieveSourceMap) { + if (options.overrideRetrieveSourceMap) { + retrieveMapHandlers.length = 0; + } + + retrieveMapHandlers.unshift(options.retrieveSourceMap); + } + + // Support runtime transpilers that include inline source maps + if (options.hookRequire && !isInBrowser()) { + var Module; + try { + Module = __webpack_require__(282); + } catch (err) { + // NOP: Loading in catch block to convert webpack error to warning. + } + var $compile = Module.prototype._compile; + + if (!$compile.__sourceMapSupport) { + Module.prototype._compile = function(content, filename) { + fileContentsCache[filename] = content; + sourceMapCache[filename] = undefined; + return $compile.call(this, content, filename); + }; + + Module.prototype._compile.__sourceMapSupport = true; + } + } + + // Configure options + if (!emptyCacheBetweenOperations) { + emptyCacheBetweenOperations = 'emptyCacheBetweenOperations' in options ? + options.emptyCacheBetweenOperations : false; + } + + // Install the error reformatter + if (!errorFormatterInstalled) { + errorFormatterInstalled = true; + Error.prepareStackTrace = prepareStackTrace; + } + + if (!uncaughtShimInstalled) { + var installHandler = 'handleUncaughtExceptions' in options ? + options.handleUncaughtExceptions : true; + + // Provide the option to not install the uncaught exception handler. This is + // to support other uncaught exception handlers (in test frameworks, for + // example). If this handler is not installed and there are no other uncaught + // exception handlers, uncaught exceptions will be caught by node's built-in + // exception handler and the process will still be terminated. However, the + // generated JavaScript code will be shown above the stack trace instead of + // the original source code. + if (installHandler && hasGlobalProcessEventEmitter()) { + uncaughtShimInstalled = true; + shimEmitUncaughtException(); + } + } +}; + +exports.resetRetrieveHandlers = function() { + retrieveFileHandlers.length = 0; + retrieveMapHandlers.length = 0; + + retrieveFileHandlers = originalRetrieveFileHandlers.slice(0); + retrieveMapHandlers = originalRetrieveMapHandlers.slice(0); + + retrieveSourceMap = handlerExec(retrieveMapHandlers); + retrieveFile = handlerExec(retrieveFileHandlers); +} + + +/***/ }), + +/***/ 327: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = __webpack_require__(983); +var binarySearch = __webpack_require__(164); +var ArraySet = __webpack_require__(837).ArraySet; +var base64VLQ = __webpack_require__(215); +var quickSort = __webpack_require__(226).quickSort; + +function SourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) + : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); +} + +SourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); +} + +/** + * The version of the source mapping spec that we are consuming. + */ +SourceMapConsumer.prototype._version = 3; + +// `__generatedMappings` and `__originalMappings` are arrays that hold the +// parsed mapping coordinates from the source map's "mappings" attribute. They +// are lazily instantiated, accessed via the `_generatedMappings` and +// `_originalMappings` getters respectively, and we only parse the mappings +// and create these arrays once queried for a source location. We jump through +// these hoops because there can be many thousands of mappings, and parsing +// them is expensive, so we only want to do it if we must. +// +// Each object in the arrays is of the form: +// +// { +// generatedLine: The line number in the generated code, +// generatedColumn: The column number in the generated code, +// source: The path to the original source file that generated this +// chunk of code, +// originalLine: The line number in the original source that +// corresponds to this chunk of generated code, +// originalColumn: The column number in the original source that +// corresponds to this chunk of generated code, +// name: The name of the original symbol which generated this chunk of +// code. +// } +// +// All properties except for `generatedLine` and `generatedColumn` can be +// `null`. +// +// `_generatedMappings` is ordered by the generated positions. +// +// `_originalMappings` is ordered by the original positions. + +SourceMapConsumer.prototype.__generatedMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } +}); + +SourceMapConsumer.prototype.__originalMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + configurable: true, + enumerable: true, + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } +}); + +SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ +SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + +SourceMapConsumer.GENERATED_ORDER = 1; +SourceMapConsumer.ORIGINAL_ORDER = 2; + +SourceMapConsumer.GREATEST_LOWER_BOUND = 1; +SourceMapConsumer.LEAST_UPPER_BOUND = 2; + +/** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ +SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL); + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + +/** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number is 1-based. + * - column: Optional. the column number in the original source. + * The column number is 0-based. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. 
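+ //
+ // A minimal usage sketch (illustrative, not from the upstream comments;
+ // 'foo.js' is a hypothetical source name): a caller that omits the column,
+ //   consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 2 })
+ // gets back every { line, column, lastColumn } generated from that original
+ // line, as described in the docblock above.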
+ var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + needle.source = this._findSourceIndex(needle.source); + if (needle.source < 0) { + return []; + } + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + +exports.SourceMapConsumer = SourceMapConsumer; + +/** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The first parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. 
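+ *
+ * A minimal usage sketch (illustrative only; the variable name and URL below
+ * are hypothetical, not part of the upstream docs):
+ *
+ *   var consumer = new SourceMapConsumer(rawSourceMap, 'http://example.com/out.js.map');
+ *   consumer.originalPositionFor({ line: 1, column: 5 });
+ *   consumer.sourceContentFor('foo.js', true); // null when content is absent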
+ * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ +function BasicSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + if (sourceRoot) { + sourceRoot = util.normalize(sourceRoot); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this._absoluteSources = this._sources.toArray().map(function (s) { + return util.computeSourceURL(sourceRoot, s, aSourceMapURL); + }); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this._sourceMapURL = aSourceMapURL; + this.file = file; +} + +BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + +/** + * Utility function to find the index of a source. Returns -1 if not + * found. + */ +BasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) { + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + if (this._sources.has(relativeSource)) { + return this._sources.indexOf(relativeSource); + } + + // Maybe aSource is an absolute URL as returned by |sources|. In + // this case we can't simply undo the transform. + var i; + for (i = 0; i < this._absoluteSources.length; ++i) { + if (this._absoluteSources[i] == aSource) { + return i; + } + } + + return -1; +}; + +/** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @param String aSourceMapURL + * The URL at which the source map can be found (optional) + * @returns BasicSourceMapConsumer + */ +BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + smc._sourceMapURL = aSourceMapURL; + smc._absoluteSources = smc._sources.toArray().map(function (s) { + return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL); + }); + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + +/** + * The version of the source mapping spec that we are consuming. + */ +BasicSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._absoluteSources.slice(); + } +}); + +/** + * Provide the JIT with a nice shape / hidden class. + */ +function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; +} + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
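+ *
+ * An illustrative note (not upstream documentation): in the "mappings"
+ * string, ";" starts a new generated line and "," separates segments on the
+ * same line, so "AAAA,IAAM;;AACA" describes two segments on generated line 1,
+ * none on line 2 and one on line 3; each segment is a run of base 64 VLQ
+ * deltas decoded by base64VLQ.decode in the loop below.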
+ */ +BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + +/** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ +BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. 
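+ //
+ // Illustrative note (not from the upstream comments): aBias picks the
+ // neighbour when there is no exact hit. Searching for column 15 between
+ // mappings at columns 10 and 20, GREATEST_LOWER_BOUND resolves to the
+ // mapping at column 10 and LEAST_UPPER_BOUND to the one at column 20.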
+ + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + +/** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ +BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated column. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). + if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null.
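+ *
+ * A hypothetical call (illustrative; the positions and names are made up):
+ *
+ *   consumer.originalPositionFor({ line: 1, column: 42 })
+ *   // => { source: 'foo.js', line: 3, column: 8, name: 'myFn' } when mapped,
+ *   //    otherwise { source: null, line: null, column: null, name: null }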
+ */ +BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + var index = this._findSourceIndex(aSource); + if (index >= 0) { + return this.sourcesContent[index]; + } + + var relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + relativeSource)) { + return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + relativeSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. 
+ * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + source = this._findSourceIndex(source); + if (source < 0) { + return { + line: null, + column: null, + lastColumn: null + }; + } + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + +exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + +/** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The first parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. 
This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ +function IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL) + } + }); +} + +IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + +/** + * The version of the source mapping spec that we are consuming. + */ +IndexedSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } +}); + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ +IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. 
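+ //
+ // Illustrative note (not from the upstream comments): for the docblock's
+ // example section offset {line: 100, column: 10}, generatedOffset holds
+ // {generatedLine: 101, generatedColumn: 11}, so a needle at generated
+ // line 150 is forwarded to that section's consumer as line
+ // 150 - (101 - 1) = 50.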
+ var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ +IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ +IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL); + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = null; + if (mapping.name) { + name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + } + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + +exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + + +/***/ }), + +/***/ 341: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var base64VLQ = __webpack_require__(215); +var util = __webpack_require__(983); +var ArraySet = __webpack_require__(837).ArraySet; +var MappingList = __webpack_require__(740).MappingList; + +/** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ +function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; +} + +SourceMapGenerator.prototype._version = 3; + +/** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. 
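+ *
+ * A minimal usage sketch (illustrative; `consumer` is assumed to be an
+ * existing SourceMapConsumer):
+ *
+ *   var generator = SourceMapGenerator.fromSourceMap(consumer);
+ *   generator.addMapping({
+ *     generated: { line: 10, column: 0 },
+ *     original: { line: 2, column: 4 },
+ *     source: 'foo.js'
+ *   });
+ *   generator.toString(); // JSON text of the rebuilt map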
+ */ +SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var sourceRelative = sourceFile; + if (sourceRoot !== null) { + sourceRelative = util.relative(sourceRoot, sourceFile); + } + + if (!generator._sources.has(sourceRelative)) { + generator._sources.add(sourceRelative); + } + + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + +/** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ +SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + +/** + * Set the source content for a source file. + */ +SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. 
+ delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + +/** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ +SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. 
+ aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + +/** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ +SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { + throw new Error( + 'original.line and original.column are not numbers -- you probably meant to omit ' + + 'the original mapping entirely and only map the generated position. If so, pass ' + + 'null for the original mapping instead of an object with empty or null values.' + ); + } + + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + +/** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
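+ *
+ * An illustrative note (not upstream documentation): every field is written
+ * as a base 64 VLQ delta against the previous mapping, and a delta of 0
+ * encodes as "A", so a segment whose four deltas are all zero reads "AAAA";
+ * ";" is emitted for each new generated line and "," between segments that
+ * share a line, mirroring the loop below.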
+ */ +SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + +SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + +/** + * Externalize the source map. + */ +SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + +/** + * Render the source map being generated to a string. + */ +SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + +exports.SourceMapGenerator = SourceMapGenerator; + + +/***/ }), + +/***/ 537: +/***/ (function(__unusedmodule, exports) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. 
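+ *
+ * Worked examples (derived from intToCharMap above): encode(0) === 'A',
+ * encode(26) === 'a', encode(52) === '0', encode(62) === '+' and
+ * encode(63) === '/'; any value outside 0..63 throws a TypeError.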
+ */ +exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); +}; + +/** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ +exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; +}; + + +/***/ }), + +/***/ 596: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ +exports.SourceMapGenerator = __webpack_require__(341).SourceMapGenerator; +exports.SourceMapConsumer = __webpack_require__(327).SourceMapConsumer; +exports.SourceNode = __webpack_require__(990).SourceNode; + + +/***/ }), + +/***/ 622: +/***/ (function(module) { + +module.exports = require("path"); + +/***/ }), + +/***/ 645: +/***/ (function(__unusedmodule, __unusedexports, __webpack_require__) { + +__webpack_require__(284).install(); + + +/***/ }), + +/***/ 650: +/***/ (function(module) { + +var toString = Object.prototype.toString + +var isModern = ( + typeof Buffer.alloc === 'function' && + typeof Buffer.allocUnsafe === 'function' && + typeof Buffer.from === 'function' +) + +function isArrayBuffer (input) { + return toString.call(input).slice(8, -1) === 'ArrayBuffer' +} + +function fromArrayBuffer (obj, byteOffset, length) { + byteOffset >>>= 0 + + var maxLength = obj.byteLength - byteOffset + + if (maxLength < 0) { + throw new RangeError("'offset' is out of bounds") + } + + if (length === undefined) { + length = maxLength + } else { + length >>>= 0 + + if (length > maxLength) { + throw new RangeError("'length' is out of bounds") + } + } + + return isModern + ? Buffer.from(obj.slice(byteOffset, byteOffset + length)) + : new Buffer(new Uint8Array(obj.slice(byteOffset, byteOffset + length))) +} + +function fromString (string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('"encoding" must be a valid string encoding') + } + + return isModern + ? Buffer.from(string, encoding) + : new Buffer(string, encoding) +} + +function bufferFrom (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('"value" argument must not be a number') + } + + if (isArrayBuffer(value)) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof value === 'string') { + return fromString(value, encodingOrOffset) + } + + return isModern + ? 
Buffer.from(value) + : new Buffer(value) +} + +module.exports = bufferFrom + + +/***/ }), + +/***/ 740: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = __webpack_require__(983); + +/** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ +function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; +} + +/** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ +function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; +} + +/** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ +MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + +/** + * Add the given source mapping. + * + * @param Object aMapping + */ +MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } +}; + +/** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ +MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; +}; + +exports.MappingList = MappingList; + + +/***/ }), + +/***/ 747: +/***/ (function(module) { + +module.exports = require("fs"); + +/***/ }), + +/***/ 837: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = __webpack_require__(983); +var has = Object.prototype.hasOwnProperty; +var hasNativeMap = typeof Map !== "undefined"; + +/** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ +function ArraySet() { + this._array = []; + this._set = hasNativeMap ? 
new Map() : Object.create(null); +} + +/** + * Static method for creating ArraySet instances from an existing array. + */ +ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; +}; + +/** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ +ArraySet.prototype.size = function ArraySet_size() { + return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; +}; + +/** + * Add the given string to this set. + * + * @param String aStr + */ +ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = hasNativeMap ? aStr : util.toSetString(aStr); + var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + if (hasNativeMap) { + this._set.set(aStr, idx); + } else { + this._set[sStr] = idx; + } + } +}; + +/** + * Is the given string a member of this set? + * + * @param String aStr + */ +ArraySet.prototype.has = function ArraySet_has(aStr) { + if (hasNativeMap) { + return this._set.has(aStr); + } else { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + } +}; + +/** + * What is the index of the given string in the array? + * + * @param String aStr + */ +ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + if (hasNativeMap) { + var idx = this._set.get(aStr); + if (idx >= 0) { + return idx; + } + } else { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + } + + throw new Error('"' + aStr + '" is not in the set.'); +}; + +/** + * What is the element at the given index? + * + * @param Number aIdx + */ +ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); +}; + +/** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ +ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); +}; + +exports.ArraySet = ArraySet; + + +/***/ }), + +/***/ 983: +/***/ (function(__unusedmodule, exports) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +/** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
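+ *
+ * A minimal usage sketch (not part of the original source):
+ *
+ *   getArg({ version: 3 }, 'version')        // => 3
+ *   getArg({ version: 3 }, 'file', 'out.js') // => 'out.js' (default applied)
+ *   getArg({ version: 3 }, 'file')           // throws: '"file" is a required argument.'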
+ */ +function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } +} +exports.getArg = getArg; + +var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; +var dataUrlRegexp = /^data:.+\,.+$/; + +function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; +} +exports.urlParse = urlParse; + +function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; +} +exports.urlGenerate = urlGenerate; + +/** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ +function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; +} +exports.normalize = normalize; + +/** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
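+ *
+ * Illustrative results (not part of the original source), following the
+ * rules above:
+ *
+ *   join('a/b', 'c')                          // => 'a/b/c'
+ *   join('a/b', '/c')                         // => '/c' (aPath is absolute)
+ *   join('/base', 'http://example.com/x.js')  // => 'http://example.com/x.js'
+ *   join('http://example.com/a/', 'b.js')     // => 'http://example.com/a/b.js'
+ *   join('http://', 'www.example.com')        // => 'http://www.example.com'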
+ */ +function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; +} +exports.join = join; + +exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || urlRegexp.test(aPath); +}; + +/** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ +function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); +} +exports.relative = relative; + +var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); +}()); + +function identity (s) { + return s; +} + +/** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ +function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; +} +exports.toSetString = supportsNullProto ? identity : toSetString; + +function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; +} +exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + +function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; +} + +/** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ +function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByOriginalPositions = compareByOriginalPositions; + +/** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ +function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + +function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 === null) { + return 1; // aStr2 !== null + } + + if (aStr2 === null) { + return -1; // aStr1 !== null + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; +} + +/** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
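+ *
+ * Typical use (illustrative, not part of the original source) is as a sort
+ * callback over inflated mappings, e.g.
+ *
+ *   mappings.sort(compareByGeneratedPositionsInflated);
+ *
+ * which is how MappingList.toArray() (module 740 above) restores
+ * generated-position order when mappings were added out of order.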
+ */ +function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + +/** + * Strip any JSON XSSI avoidance prefix from the string (as documented + * in the source maps specification), and then parse the string as + * JSON. + */ +function parseSourceMapInput(str) { + return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, '')); +} +exports.parseSourceMapInput = parseSourceMapInput; + +/** + * Compute the URL of a source given the the source root, the source's + * URL, and the source map's URL. + */ +function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { + sourceURL = sourceURL || ''; + + if (sourceRoot) { + // This follows what Chrome does. + if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') { + sourceRoot += '/'; + } + // The spec says: + // Line 4: An optional source root, useful for relocating source + // files on a server or removing repeated values in the + // “sources” entry. This value is prepended to the individual + // entries in the “source” field. + sourceURL = sourceRoot + sourceURL; + } + + // Historically, SourceMapConsumer did not take the sourceMapURL as + // a parameter. This mode is still somewhat supported, which is why + // this code block is conditional. However, it's preferable to pass + // the source map URL to SourceMapConsumer, so that this function + // can implement the source URL resolution algorithm as outlined in + // the spec. This block is basically the equivalent of: + // new URL(sourceURL, sourceMapURL).toString() + // ... except it avoids using URL, which wasn't available in the + // older releases of node still supported by this library. + // + // The spec says: + // If the sources are not absolute URLs after prepending of the + // “sourceRoot”, the sources are resolved relative to the + // SourceMap (like resolving script src in a html document). + if (sourceMapURL) { + var parsed = urlParse(sourceMapURL); + if (!parsed) { + throw new Error("sourceMapURL could not be parsed"); + } + if (parsed.path) { + // Strip the last path component, but keep the "/". + var index = parsed.path.lastIndexOf('/'); + if (index >= 0) { + parsed.path = parsed.path.substring(0, index + 1); + } + } + sourceURL = join(urlGenerate(parsed), sourceURL); + } + + return normalize(sourceURL); +} +exports.computeSourceURL = computeSourceURL; + + +/***/ }), + +/***/ 990: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var SourceMapGenerator = __webpack_require__(341).SourceMapGenerator; +var util = __webpack_require__(983); + +// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other +// operating systems these days (capturing the result). 
+var REGEX_NEWLINE = /(\r?\n)/; + +// Newline character code for charCodeAt() comparisons +var NEWLINE_CODE = 10; + +// Private symbol for identifying `SourceNode`s when multiple versions of +// the source-map library are loaded. This MUST NOT CHANGE across +// versions! +var isSourceNode = "$$$isSourceNode$$$"; + +/** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ +function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); +} + +/** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ +SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var remainingLinesIndex = 0; + var shiftNextLine = function() { + var lineContents = getNextLine(); + // The last line of a file might not have a newline. + var newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. 
+ // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[remainingLinesIndex] || ''; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[remainingLinesIndex] || ''; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + +/** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got " + aChunk + ); + } + return this; +}; + +/** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } +}; + +/** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ +SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; +}; + +/** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ +SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; +}; + +/** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ +SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + +/** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + +/** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ +SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; +}; + +/** + * Returns the string representation of this source node along with a source + * map. 
+ */ +SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; +}; + +exports.SourceNode = SourceNode; + + +/***/ }) + +/******/ }); \ No newline at end of file diff --git a/node_modules/@actions/core/README.md b/node_modules/@actions/core/README.md deleted file mode 100644 index 457f73c..0000000 --- a/node_modules/@actions/core/README.md +++ /dev/null @@ -1,140 +0,0 @@ -# `@actions/core` - -> Core functions for setting results, logging, registering secrets and exporting variables across actions - -## Usage - -### Import the package - -```js -// javascript -const core = require('@actions/core'); - -// typescript -import * as core from '@actions/core'; -``` - -#### Inputs/Outputs - -Action inputs can be read with `getInput`. Outputs can be set with `setOutput` which makes them available to be mapped into inputs of other actions to ensure they are decoupled. - -```js -const myInput = core.getInput('inputName', { required: true }); - -core.setOutput('outputKey', 'outputVal'); -``` - -#### Exporting variables - -Since each step runs in a separate process, you can use `exportVariable` to add it to this step and future steps environment blocks. - -```js -core.exportVariable('envVar', 'Val'); -``` - -#### Setting a secret - -Setting a secret registers the secret with the runner to ensure it is masked in logs. - -```js -core.setSecret('myPassword'); -``` - -#### PATH Manipulation - -To make a tool's path available in the path for the remainder of the job (without altering the machine or containers state), use `addPath`. 
The runner will prepend the path given to the jobs PATH. - -```js -core.addPath('/path/to/mytool'); -``` - -#### Exit codes - -You should use this library to set the failing exit code for your action. If status is not set and the script runs to completion, that will lead to a success. - -```js -const core = require('@actions/core'); - -try { - // Do stuff -} -catch (err) { - // setFailed logs the message and sets a failing exit code - core.setFailed(`Action failed with error ${err}`); -} - -Note that `setNeutral` is not yet implemented in actions V2 but equivalent functionality is being planned. - -``` - -#### Logging - -Finally, this library provides some utilities for logging. Note that debug logging is hidden from the logs by default. This behavior can be toggled by enabling the [Step Debug Logs](../../docs/action-debugging.md#step-debug-logs). - -```js -const core = require('@actions/core'); - -const myInput = core.getInput('input'); -try { - core.debug('Inside try block'); - - if (!myInput) { - core.warning('myInput was not set'); - } - - // Do stuff -} -catch (err) { - core.error(`Error ${err}, action may still succeed though`); -} -``` - -This library can also wrap chunks of output in foldable groups. - -```js -const core = require('@actions/core') - -// Manually wrap output -core.startGroup('Do some function') -doSomeFunction() -core.endGroup() - -// Wrap an asynchronous function call -const result = await core.group('Do something async', async () => { - const response = await doSomeHTTPRequest() - return response -}) -``` - -#### Action state - -You can use this library to save state and get state for sharing information between a given wrapper action: - -**action.yml** -```yaml -name: 'Wrapper action sample' -inputs: - name: - default: 'GitHub' -runs: - using: 'node12' - main: 'main.js' - post: 'cleanup.js' -``` - -In action's `main.js`: - -```js -const core = require('@actions/core'); - -core.saveState("pidToKill", 12345); -``` - -In action's `cleanup.js`: -```js -const core = require('@actions/core'); - -var pid = core.getState("pidToKill"); - -process.kill(pid); -``` \ No newline at end of file diff --git a/node_modules/@actions/core/lib/command.d.ts b/node_modules/@actions/core/lib/command.d.ts deleted file mode 100644 index 51b8f11..0000000 --- a/node_modules/@actions/core/lib/command.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -interface CommandProperties { - [key: string]: string; -} -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -export declare function issueCommand(command: string, properties: CommandProperties, message: string): void; -export declare function issue(name: string, message?: string): void; -export {}; diff --git a/node_modules/@actions/core/lib/command.js b/node_modules/@actions/core/lib/command.js deleted file mode 100644 index eeef233..0000000 --- a/node_modules/@actions/core/lib/command.js +++ /dev/null @@ -1,78 +0,0 @@ -"use strict"; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const os = __importStar(require("os")); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the 
message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -function escapeData(s) { - return (s || '') - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return (s || '') - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/command.js.map b/node_modules/@actions/core/lib/command.js.map deleted file mode 100644 index 00a9861..0000000 --- a/node_modules/@actions/core/lib/command.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAwB;AAQxB;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAe;IAEf,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,UAAkB,EAAE;IACtD,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAS;IAC3B,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;SACb,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;SACb,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"} \ No 
newline at end of file diff --git a/node_modules/@actions/core/lib/core.d.ts b/node_modules/@actions/core/lib/core.d.ts deleted file mode 100644 index 6483c3c..0000000 --- a/node_modules/@actions/core/lib/core.d.ts +++ /dev/null @@ -1,112 +0,0 @@ -/** - * Interface for getInput options - */ -export interface InputOptions { - /** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */ - required?: boolean; -} -/** - * The code to exit an action - */ -export declare enum ExitCode { - /** - * A code indicating that the action was successful - */ - Success = 0, - /** - * A code indicating that the action was a failure - */ - Failure = 1 -} -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable - */ -export declare function exportVariable(name: string, val: string): void; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -export declare function setSecret(secret: string): void; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -export declare function addPath(inputPath: string): void; -/** - * Gets the value of an input. The value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -export declare function getInput(name: string, options?: InputOptions): string; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store - */ -export declare function setOutput(name: string, value: string): void; -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -export declare function setFailed(message: string): void; -/** - * Writes debug message to user log - * @param message debug message - */ -export declare function debug(message: string): void; -/** - * Adds an error issue - * @param message error issue message - */ -export declare function error(message: string): void; -/** - * Adds an warning issue - * @param message warning issue message - */ -export declare function warning(message: string): void; -/** - * Writes info to log with console.log. - * @param message info message - */ -export declare function info(message: string): void; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -export declare function startGroup(name: string): void; -/** - * End an output group. - */ -export declare function endGroup(): void; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -export declare function group(name: string, fn: () => Promise): Promise; -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store - */ -export declare function saveState(name: string, value: string): void; -/** - * Gets the value of an state set by this action's main execution. 
- * - * @param name name of the state to get - * @returns string - */ -export declare function getState(name: string): string; diff --git a/node_modules/@actions/core/lib/core.js b/node_modules/@actions/core/lib/core.js deleted file mode 100644 index b99e1de..0000000 --- a/node_modules/@actions/core/lib/core.js +++ /dev/null @@ -1,202 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const command_1 = require("./command"); -const os = __importStar(require("os")); -const path = __importStar(require("path")); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable - */ -function exportVariable(name, val) { - process.env[name] = val; - command_1.issueCommand('set-env', { name }, val); -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - command_1.issueCommand('add-path', {}, inputPath); - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. The value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Sets the value of an output. 
- * - * @param name name of the output to set - * @param value value to store - */ -function setOutput(name, value) { - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message - */ -function error(message) { - command_1.issue('error', message); -} -exports.error = error; -/** - * Adds an warning issue - * @param message warning issue message - */ -function warning(message) { - command_1.issue('warning', message); -} -exports.warning = warning; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store - */ -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. 
- * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/core.js.map b/node_modules/@actions/core/lib/core.js.map deleted file mode 100644 index 33c6268..0000000 --- a/node_modules/@actions/core/lib/core.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAE7C,uCAAwB;AACxB,2CAA4B;AAU5B;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAED,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAW;IACtD,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,CAAA;IACvB,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,GAAG,CAAC,CAAA;AACtC,CAAC;AAHD,wCAGC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;IACvC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AAHD,0BAGC;AAED;;;;;;GAMG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AARD,4BAQC;AAED;;;;;GAKG;AACH,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAa;IACnD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAe;IACvC,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IACnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAHD,8BAGC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,eAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;AACzB,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,OAAe;IACrC,eAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;AAC3B,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAa;IACnD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC"} \ No newline at end of file diff --git a/node_modules/@actions/core/package.json b/node_modules/@actions/core/package.json deleted file mode 100644 index 9f39f1a..0000000 --- a/node_modules/@actions/core/package.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "_from": "@actions/core@1.2.2", - "_id": "@actions/core@1.2.2", - "_inBundle": false, - "_integrity": 
"sha512-IbCx7oefq+Gi6FWbSs2Fnw8VkEI6Y4gvjrYprY3RV//ksq/KPMlClOerJ4jRosyal6zkUIc8R9fS/cpRMlGClg==", - "_location": "/@actions/core", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "@actions/core@1.2.2", - "name": "@actions/core", - "escapedName": "@actions%2fcore", - "scope": "@actions", - "rawSpec": "1.2.2", - "saveSpec": null, - "fetchSpec": "1.2.2" - }, - "_requiredBy": [ - "/" - ], - "_resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.2.tgz", - "_shasum": "3c4848d50378f9e3bcb67bcf97813382ec7369ee", - "_spec": "@actions/core@1.2.2", - "_where": "/Users/bret/repos/deploy-to-neocities", - "bugs": { - "url": "https://github.com/actions/toolkit/issues" - }, - "bundleDependencies": false, - "deprecated": false, - "description": "Actions core lib", - "devDependencies": { - "@types/node": "^12.0.2" - }, - "directories": { - "lib": "lib", - "test": "__tests__" - }, - "files": [ - "lib" - ], - "homepage": "https://github.com/actions/toolkit/tree/master/packages/core", - "keywords": [ - "github", - "actions", - "core" - ], - "license": "MIT", - "main": "lib/core.js", - "name": "@actions/core", - "publishConfig": { - "access": "public" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/actions/toolkit.git", - "directory": "packages/core" - }, - "scripts": { - "audit-moderate": "npm install && npm audit --audit-level=moderate", - "test": "echo \"Error: run tests from root\" && exit 1", - "tsc": "tsc" - }, - "types": "lib/core.d.ts", - "version": "1.2.2" -} diff --git a/node_modules/async-folder-walker/.github/workflows/tests.yml b/node_modules/async-folder-walker/.github/workflows/tests.yml deleted file mode 100644 index 1cfe45a..0000000 --- a/node_modules/async-folder-walker/.github/workflows/tests.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: tests - -on: [push] - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] - node: [12] - - steps: - - uses: actions/checkout@v1 - - name: Use Node.js ${{ matrix.node }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node }} - - name: npm install && npm test - run: | - npm i - npm test - env: - CI: true diff --git a/node_modules/async-folder-walker/.vscode/launch.json b/node_modules/async-folder-walker/.vscode/launch.json deleted file mode 100644 index 8392ff4..0000000 --- a/node_modules/async-folder-walker/.vscode/launch.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "type": "node", - "request": "launch", - "name": "Launch via NPM", - "runtimeExecutable": "npm", - "runtimeArgs": [ - "run-script", - "debug" - ], - "port": 9229, - "skipFiles": [ - "/**" - ] - } - ] -} diff --git a/node_modules/async-folder-walker/CHANGELOG.md b/node_modules/async-folder-walker/CHANGELOG.md deleted file mode 100644 index db0fe31..0000000 --- a/node_modules/async-folder-walker/CHANGELOG.md +++ /dev/null @@ -1,19 +0,0 @@ -# async-folder-walker Change Log - -All notable changes to this project will be documented in this file. -This project adheres to [Semantic Versioning](http://semver.org/). - -## Unreleased - -## 2.0.1 - 2019-12-27 - -* Internal tweaks -* Docs updates - -## 2.0.0 - 2019-12-26 - -* Remove ESM support. Fuggit. It simply offers zero benafits in a node env. 
- -## 1.0.0 - 2019-11-11 - -* Initial release. diff --git a/node_modules/async-folder-walker/CODE_OF_CONDUCT.md b/node_modules/async-folder-walker/CODE_OF_CONDUCT.md deleted file mode 100644 index 4caa43a..0000000 --- a/node_modules/async-folder-walker/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,4 +0,0 @@ -# Code of conduct - -- This repo is governed as a dictatorship starting with the originator of the project. -- No malevolence tolerated whatsoever. diff --git a/node_modules/async-folder-walker/CONTRIBUTING.md b/node_modules/async-folder-walker/CONTRIBUTING.md deleted file mode 100644 index afac12a..0000000 --- a/node_modules/async-folder-walker/CONTRIBUTING.md +++ /dev/null @@ -1,11 +0,0 @@ -# Contributing - -- Contributors reserve the right to walk away from this project at any moment with or without notice. -- Questions are welcome, however unless there is a official support contract established between the maintainers and the requester, support is not guaranteed. -- Patches, ideas and changes welcome. -- Fixes almost always welcome. -- Features sometimes welcome. Please open an issue to discuss the issue prior to spending lots of time on the problem. It may be rejected. If you don't want to wait around for the discussion to commence, and you really want to jump into the implementation work, be prepared for fork if the idea is respectfully declined. -- Try to stay within the style of the existing code. -- All tests must pass. -- Additional features or code paths must be tested. -- Aim for 100% coverage. diff --git a/node_modules/async-folder-walker/LICENSE b/node_modules/async-folder-walker/LICENSE deleted file mode 100644 index 7524c61..0000000 --- a/node_modules/async-folder-walker/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Bret Comnes - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/async-folder-walker/README.md b/node_modules/async-folder-walker/README.md deleted file mode 100644 index afdbdb9..0000000 --- a/node_modules/async-folder-walker/README.md +++ /dev/null @@ -1,170 +0,0 @@ -# async-folder-walker -[![Actions Status](https://github.com/bcomnes/async-folder-walker/workflows/tests/badge.svg)](https://github.com/bcomnes/async-folder-walker/actions) - -A recursive async iterator of the files and directories in a given folder. Can take multiple folders, limit walk depth and filter based on path names and stat results. 
- -![](https://repository-images.githubusercontent.com/223294839/43cf9600-0d3f-11ea-858e-81b08a14509f) - -``` -npm install async-folder-walker -``` - -## Usage - -``` js -const { asyncFolderWalker, allFiles } = require('async-folder-walker') - -async function iterateFiles () { - const walker = asyncFolderWalker(['.git', 'node_modules']) - for await (const file of walker) { - console.log(file) // logs the file path! - } -} - -async function getAllFiles () { - const allFilepaths = await allFiles(['.git', 'node_modules']) - console.log(allFilepaths) -} - -iterateFiles().then(() => getAllFiles()) -``` - -## API - -### `const { asyncFolderWalker, allFiles } = require('async-folder-walker')` - -Import `asyncFolderWalker` or `allFiles`. - -### `async-gen = asyncFolderWalker(paths, [opts])` - -Return an async generator that will iterate over all of files inside of a directory. `paths` can be a string path or an Array of string paths. - -You can iterate over each file and directory individually using a `for-await...of` loop. Note, you must be inside an [async function statement](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function). - -```js -const { asyncFolderWalker } = require('async-folder-walker'); -async function iterateFiles () { - const walker = asyncFolderWalker(['.git', 'node_modules']); - for await (const file of walker) { - console.log(file); // logs the file path! - } -} - -iterateFiles(); -``` - -Opts include: - -```js -{ - fs: require('fs'), - pathFilter: filepath => true, - statFilter st => true, - maxDepth: Infinity, - shaper: ({ root, filepath, stat, relname, basename }) => filepath -} -``` - -The `pathFilter` function allows you to filter files from additional async stat operations. Return false to filter the file. - -```js -{ // exclude node_modules - pathFilter: filepath => !filepath.includes(node_modules) -} -``` - -The `statFilter` function allows you to filter files based on the internal stat operation. Return false to filter the file. - -```js -{ // exclude all directories: - statFilter: st => !st.isDirectory() -} -``` - -The `shaper` function lets you change the shape of the returned value based on data accumulaed during the iteration. 
To return the same shape as [okdistribute/folder-walker](https://github.com/okdistribute/folder-walker) use the following function: - -```js -{ // Return the same shape as folder-walker - shaper: fwData => fwData -} -```` - -Example of a fwData object for a directory: - -```js -{ - root: '/Users/bret/repos/async-folder-walker/fixtures', - filepath: '/Users/bret/repos/async-folder-walker/fixtures/sub-folder/sub-sub-folder', - stat: Stats { - dev: 16777220, - mode: 16877, - nlink: 3, - uid: 501, - gid: 20, - rdev: 0, - blksize: 4096, - ino: 30244023, - size: 96, - blocks: 0, - atimeMs: 1574381262779.8396, - mtimeMs: 1574380914743.5474, - ctimeMs: 1574380914743.5474, - birthtimeMs: 1574380905232.5996, - atime: 2019-11-22T00:07:42.780Z, - mtime: 2019-11-22T00:01:54.744Z, - ctime: 2019-11-22T00:01:54.744Z, - birthtime: 2019-11-22T00:01:45.233Z - }, - relname: 'sub-folder/sub-sub-folder', - basename: 'sub-sub-folder' -} -``` - -and another example for a file on windows: - -```js -{ - root: 'D:\\a\\async-folder-walker\\async-folder-walker\\fixtures', - filepath: 'D:\\a\\async-folder-walker\\async-folder-walker\\fixtures\\sub-folder\\sub-sub-folder\\sub-sub-folder-file.json', - stat: Stats { - dev: 1321874112, - mode: 33206, - nlink: 1, - uid: 0, - gid: 0, - rdev: 0, - blksize: 4096, - ino: 562949953421580, - size: 37, - blocks: 0, - atimeMs: 1577476819530.035, - mtimeMs: 1577476819530.035, - ctimeMs: 1577476819530.035, - birthtimeMs: 1577476819530.035, - atime: 2019-12-27T20:00:19.530Z, - mtime: 2019-12-27T20:00:19.530Z, - ctime: 2019-12-27T20:00:19.530Z, - birthtime: 2019-12-27T20:00:19.530Z - }, - relname: 'sub-folder\\sub-sub-folder\\sub-sub-folder-file.json', - basename: 'sub-sub-folder-file.json' -} -``` - -The `stat` property is an instance of [fs.Stats](https://nodejs.org/api/fs.html#fs_class_fs_stats) so it has extra methods not listed here. - -### `files = await allFiles(paths, [opts])` - -Get an Array of all files inside of a directory. `paths` can be a single string path or an array of string paths. - -`opts` Is the same as `asyncFolderWalker`. - -## See also - -This module is effectivly a rewrite of [okdistribute/folder-walker](https://github.com/okdistribute/folder-walker) using async generators instead of Node streams, and a few tweaks to the underlying options to make the results a bit more flexible. 
- -- [for-await...of](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) - -## License - -MIT diff --git a/node_modules/async-folder-walker/fixtures/another-folder/test.json b/node_modules/async-folder-walker/fixtures/another-folder/test.json deleted file mode 100644 index 0967ef4..0000000 --- a/node_modules/async-folder-walker/fixtures/another-folder/test.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/node_modules/async-folder-walker/fixtures/sub-folder/sub-folder-file.json b/node_modules/async-folder-walker/fixtures/sub-folder/sub-folder-file.json deleted file mode 100644 index 44cad12..0000000 --- a/node_modules/async-folder-walker/fixtures/sub-folder/sub-folder-file.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "sub-folder": "content" -} diff --git a/node_modules/async-folder-walker/fixtures/sub-folder/sub-sub-folder/sub-sub-folder-file.json b/node_modules/async-folder-walker/fixtures/sub-folder/sub-sub-folder/sub-sub-folder-file.json deleted file mode 100644 index 28d2472..0000000 --- a/node_modules/async-folder-walker/fixtures/sub-folder/sub-sub-folder/sub-sub-folder-file.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "sub-sub-folder": "content" -} diff --git a/node_modules/async-folder-walker/fixtures/test.json b/node_modules/async-folder-walker/fixtures/test.json deleted file mode 100644 index f2a886f..0000000 --- a/node_modules/async-folder-walker/fixtures/test.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "hello": "world" -} diff --git a/node_modules/async-folder-walker/index.js b/node_modules/async-folder-walker/index.js deleted file mode 100644 index 8a5b6d1..0000000 --- a/node_modules/async-folder-walker/index.js +++ /dev/null @@ -1,179 +0,0 @@ -const fs = require('fs') -const path = require('path') - -const { readdir, lstat } = fs.promises - -/** - * pathFilter lets you filter files based on a resolved `filepath`. - * @callback pathFilter - * @param {String} filepath - The resolved `filepath` of the file to test for filtering. - * - * @return {Boolean} Return false to filter the given `filepath` and true to include it. - */ -const pathFilter = filepath => true - -/** - * statFilter lets you filter files based on a lstat object. - * @callback statFilter - * @param {Object} st - A fs.Stats instance. - * - * @return {Boolean} Return false to filter the given `filepath` and true to include it. - */ -const statFilter = filepath => true - -/** - * FWStats is the object that the okdistribute/folder-walker module returns by default. - * - * @typedef FWStats - * @property {String} root - The filepath of the directory where the walk started. - * @property {String} filepath - The resolved filepath. - * @property {Object} stat - A fs.Stats instance. - * @property {String} relname - The relative path to `root`. - * @property {String} basename - The resolved filepath of the files containing directory. - */ - -/** - * shaper lets you change the shape of the returned file data from walk-time stats. - * @callback shaper - * @param {FWStats} fwStats - The same status object returned from folder-walker. - * - * @return {*} - Whatever you want returned from the directory walk. - */ -const shaper = ({ root, filepath, stat, relname, basename }) => filepath - -/** - * Options object - * - * @typedef Opts - * @property {pathFilter} [pathFilter] - A pathFilter cb. - * @property {statFilter} [statFilter] - A statFilter cb. - * @property {Number} [maxDepth=Infinity] - The maximum number of folders to walk down into. - * @property {shaper} [shaper] - A shaper cb. 
- */ - -/** - * Create an async generator that iterates over all folders and directories inside of `dirs`. - * - * @async - * @generator - * @function - * @public - * @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths. - * @param {?(Opts)} opts - Options used for the directory walk. - * - * @yields {Promise} - An async iterator that returns anything. - */ -async function * asyncFolderWalker (dirs, opts) { - opts = Object.assign({ - fs, - pathFilter, - statFilter, - maxDepth: Infinity, - shaper - }, opts) - - const roots = [dirs].flat().filter(opts.pathFilter) - const pending = [] - - while (roots.length) { - const root = roots.shift() - pending.push(root) - - while (pending.length) { - const current = pending.shift() - if (typeof current === 'undefined') continue - const st = await lstat(current) - if ((!st.isDirectory() || depthLimiter(current, root, opts.maxDepth)) && opts.statFilter(st)) { - yield opts.shaper(fwShape(root, current, st)) - continue - } - - const files = await readdir(current) - files.sort() - - for (const file of files) { - var next = path.join(current, file) - if (opts.pathFilter(next)) pending.unshift(next) - } - if (current === root || !opts.statFilter(st)) continue - else yield opts.shaper(fwShape(root, current, st)) - } - } -} - -/** - * Generates the same shape as the folder-walker module. - * - * @function - * @private - * @param {String} root - Root filepath. - * @param {String} name - Target filepath. - * @param {Object} st - fs.Stat object. - * - * @return {FWStats} - Folder walker object. - */ -function fwShape (root, name, st) { - return { - root: root, - filepath: name, - stat: st, - relname: root === name ? path.basename(name) : path.relative(root, name), - basename: path.basename(name) - } -} - -/** - * Test if we are at maximum directory depth. - * - * @private - * @function - * @param {String} filePath - The resolved path of the target fille. - * @param {String} relativeTo - The root directory of the current walk. - * @param {Number} maxDepth - The maximum number of folders to descend into. - * - * @returns {Boolean} - Return true to signal stop descending. - */ -function depthLimiter (filePath, relativeTo, maxDepth) { - if (maxDepth === Infinity) return false - const rootDepth = relativeTo.split(path.sep).length - const fileDepth = filePath.split(path.sep).length - return fileDepth - rootDepth > maxDepth -} - -/** - * Async iterable collector - * - * @async - * @function - * @private - */ -async function all (iterator) { - const collect = [] - - for await (const result of iterator) { - collect.push(result) - } - - return collect -} - -/** - * allFiles gives you all files from the directory walk as an array. - * - * @async - * @function - * @public - * @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths. - * @param {?(Opts)} opts - Options used for the directory walk. - * - * @returns {Promise} - An async iterator that returns anything. 
- */ -async function allFiles (...args) { - return all(asyncFolderWalker(...args)) -} - -module.exports = { - asyncFolderWalker, - allFiles, - all -} diff --git a/node_modules/async-folder-walker/package.json b/node_modules/async-folder-walker/package.json deleted file mode 100644 index 2eb4bcb..0000000 --- a/node_modules/async-folder-walker/package.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "_from": "async-folder-walker@^2.0.1", - "_id": "async-folder-walker@2.0.1", - "_inBundle": false, - "_integrity": "sha512-n0PW9w3HAZW7ems0XrgIYeX+3l2vX6HhZQyXMtkeKW3uEjHT5EOlKD8NgIeZK6fREnpw50F+Cb6ig3nWsuaTPA==", - "_location": "/async-folder-walker", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "async-folder-walker@^2.0.1", - "name": "async-folder-walker", - "escapedName": "async-folder-walker", - "rawSpec": "^2.0.1", - "saveSpec": null, - "fetchSpec": "^2.0.1" - }, - "_requiredBy": [ - "/async-neocities" - ], - "_resolved": "https://registry.npmjs.org/async-folder-walker/-/async-folder-walker-2.0.1.tgz", - "_shasum": "cfc3007945a4bb109c5c548a0f07cd8a9cc15671", - "_spec": "async-folder-walker@^2.0.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "Bret Comnes", - "email": "bcomnes@gmail.com", - "url": "https://bret.io" - }, - "bugs": { - "url": "https://github.com/bcomnes/async-folder-walker/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "WIP - nothing to see here", - "devDependencies": { - "dependency-check": "^4.1.0", - "npm-run-all": "^4.1.5", - "p-temporary-directory": "^1.1.1", - "standard": "^14.3.1", - "tap": "^14.10.5" - }, - "homepage": "https://github.com/bcomnes/async-folder-walker", - "keywords": [], - "license": "MIT", - "main": "index.js", - "name": "async-folder-walker", - "repository": { - "type": "git", - "url": "git+https://github.com/bcomnes/async-folder-walker.git" - }, - "scripts": { - "debug": "node --nolazy --inspect-brk=9229 -r esm test.js", - "test": "run-s test:*", - "test:deps": "dependency-check . 
--no-dev --no-peer", - "test:standard": "standard", - "test:tap": "tap" - }, - "standard": { - "ignore": [ - "dist" - ] - }, - "version": "2.0.1" -} diff --git a/node_modules/async-folder-walker/test.js b/node_modules/async-folder-walker/test.js deleted file mode 100644 index 8fbb0bc..0000000 --- a/node_modules/async-folder-walker/test.js +++ /dev/null @@ -1,102 +0,0 @@ -const tap = require('tap') -const { asyncFolderWalker, allFiles } = require('.') -const path = require('path') -const tmp = require('p-temporary-directory') - -const fixtures = path.join(__dirname, 'fixtures') - -tap.test('for of multiple folders', async t => { - for await (const file of asyncFolderWalker([ - path.join(fixtures, 'sub-folder'), - path.join(fixtures, 'another-folder') - ])) { - t.ok(file, file) - } -}) - -tap.test('Array from async iterator', async t => { - const files = await allFiles([ - path.join(fixtures, 'sub-folder'), - path.join(fixtures, 'another-folder') - ]) - t.equal(files.length, 4, 'expected number of files are found') -}) - -tap.skip('Shape example', async t => { - await allFiles([fixtures], { - shaper: fwData => { - console.log(fwData) - return fwData - } - }) - t.pass('shape printed') -}) - -tap.test('No args', async t => { - for await (const file of asyncFolderWalker()) { - t.fail(file, 'no files should be found!') - } - t.pass('for of executed') -}) - -tap.test('No folders', async t => { - const [dir, cleanup] = await tmp() - try { - for await (const file of asyncFolderWalker(dir)) { - t.fail(file, 'no files should be found!') - } - t.pass('for of executed') - } finally { - await cleanup() - } -}) - -tap.test('When you just pass a file', async t => { - const [dir, cleanup] = await tmp() - try { - const theFile = path.join(fixtures, 'test.json') - const files = await allFiles([theFile, dir]) - t.equal(files.length, 1, 'only one file is found') - t.equal(theFile, files[0], 'only one file is found') - } finally { - await cleanup() - } -}) - -tap.test('pathFilter works', async t => { - const filterStrig = 'sub-folder' - const files = await allFiles(fixtures, { - pathFilter: p => !p.includes(filterStrig) - }) - - t.false(files.some(f => f.includes(filterStrig)), 'No paths include the excluded string') -}) - -tap.test('statFilter works', async t => { - const stats = await allFiles(fixtures, { - statFilter: st => !st.isDirectory(), // Exclude files - shaper: ({ root, filepath, stat, relname, basename }) => stat // Lets get the stats instead of paths - }) - - for (const st of stats) { - t.false(st.isDirectory(), 'none of the files are directories') - } -}) - -tap.test('dont include root directory in response', async (t) => { - const root = process.cwd() - for await (const file of asyncFolderWalker(root)) { - if (file === root) t.fail('root directory should not be in results') - } - t.pass('The root was not included in results.') -}) - -tap.test('dont walk past the maxDepth', async t => { - const maxDepth = 3 - const walker = asyncFolderWalker(['.git', 'node_modules'], { maxDepth }) - for await (const file of walker) { - const correctLength = file.split(path.sep).length - process.cwd().split(path.sep).length <= maxDepth - if (!correctLength) t.fail('walker walked past the depth it was supposed to') - } - t.pass('Walker was depth limited') -}) diff --git a/node_modules/async-neocities/CHANGELOG.md b/node_modules/async-neocities/CHANGELOG.md deleted file mode 100644 index b0eaaa1..0000000 --- a/node_modules/async-neocities/CHANGELOG.md +++ /dev/null @@ -1,128 +0,0 @@ -# Changelog - -All notable 
changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). - -## [v1.1.6](https://github.com/bcomnes/async-neocities/compare/v1.1.5...v1.1.6) - 2020-02-13 - -### Commits - -- refactor: limit decimal places in statHandler [`745b5c0`](https://github.com/bcomnes/async-neocities/commit/745b5c08b446723b837cf787e5911045cd0a01ed) - -## [v1.1.5](https://github.com/bcomnes/async-neocities/compare/v1.1.4...v1.1.5) - 2020-02-13 - -### Commits - -- bug: Log last stats before clear [`c4c83f8`](https://github.com/bcomnes/async-neocities/commit/c4c83f8e329303404dfec6a2943d8b01783040ad) - -## [v1.1.4](https://github.com/bcomnes/async-neocities/compare/v1.1.3...v1.1.4) - 2020-02-13 - -### Commits - -- bug: fix logging handler logic [`f6c5921`](https://github.com/bcomnes/async-neocities/commit/f6c5921d6fd0a420c5895ed6ba2fe2f766e726fd) - -## [v1.1.3](https://github.com/bcomnes/async-neocities/compare/v1.1.2...v1.1.3) - 2020-02-13 - -### Commits - -- refactor: Stop logging progress when the stream has been read. [`3ce0fc4`](https://github.com/bcomnes/async-neocities/commit/3ce0fc452acbbbd28c39b53a3a9a4318a8019c09) - -## [v1.1.2](https://github.com/bcomnes/async-neocities/compare/v1.1.1...v1.1.2) - 2020-02-13 - -### Commits - -- bug: Fix export name of statsHandler [`8958e5e`](https://github.com/bcomnes/async-neocities/commit/8958e5eeb947376690a1ce0cefe7cce3d59be5b8) - -## [v1.1.1](https://github.com/bcomnes/async-neocities/compare/v1.1.0...v1.1.1) - 2020-02-13 - -### Commits - -- bug: Fix busted export of stats handler. [`510ae29`](https://github.com/bcomnes/async-neocities/commit/510ae293263955e0e34d3ab48df253fb6e093053) - -## [v1.1.0](https://github.com/bcomnes/async-neocities/compare/v1.0.2...v1.1.0) - 2020-02-13 - -### Commits - -- feat: Finish statsCb API and add a stats-handler.js function. 
[`c8e6483`](https://github.com/bcomnes/async-neocities/commit/c8e64835e594e68715ef71590b08baac374052bd) - -## [v1.0.2](https://github.com/bcomnes/async-neocities/compare/v1.0.1...v1.0.2) - 2020-02-13 - -### Commits - -- chore: remove total time from stats [`3c375ec`](https://github.com/bcomnes/async-neocities/commit/3c375ecf64ae8536a8e3ccce0a69cd93c8c6a306) - -## [v1.0.1](https://github.com/bcomnes/async-neocities/compare/v1.0.0...v1.0.1) - 2020-02-13 - -### Commits - -- bug: Fix a number of logging bugs [`96fbea2`](https://github.com/bcomnes/async-neocities/commit/96fbea2bbd27ba1ac5105fce37e624d804dcbdb6) - -## [v1.0.0](https://github.com/bcomnes/async-neocities/compare/v0.0.10...v1.0.0) - 2020-02-12 - -### Commits - -- feat: progress API sketch [`be8b9ec`](https://github.com/bcomnes/async-neocities/commit/be8b9ec062b5ea23157a6a841c9d66d03a85a8ca) -- docs: update README [`ec4f5f1`](https://github.com/bcomnes/async-neocities/commit/ec4f5f154b690dba0814ec0955fee674e8e94692) -- CHANGELOG [`c9b64ed`](https://github.com/bcomnes/async-neocities/commit/c9b64edd4d3db025adc737982477ce0d760f3254) - -## [v0.0.10](https://github.com/bcomnes/async-neocities/compare/v0.0.9...v0.0.10) - 2020-02-10 - -### Commits - -- dont do work unless there is work [`616a306`](https://github.com/bcomnes/async-neocities/commit/616a306ba3ca091da11c9c85bae2b07cb0b2768e) - -## [v0.0.9](https://github.com/bcomnes/async-neocities/compare/v0.0.8...v0.0.9) - 2020-02-10 - -### Commits - -- Use stream ctor [`e8201a0`](https://github.com/bcomnes/async-neocities/commit/e8201a053950848962a220b83ffa1a97ebab6e70) - -## [v0.0.8](https://github.com/bcomnes/async-neocities/compare/v0.0.7...v0.0.8) - 2020-02-10 - -### Commits - -- Fix more bugs [`95da7b7`](https://github.com/bcomnes/async-neocities/commit/95da7b7218082ab51c1463851f87428dc0c501ac) - -## [v0.0.7](https://github.com/bcomnes/async-neocities/compare/v0.0.6...v0.0.7) - 2020-02-10 - -### Commits - -- bugs [`71ead78`](https://github.com/bcomnes/async-neocities/commit/71ead78e0f48f619816b3ae3ea8154e8301c77ac) - -## [v0.0.6](https://github.com/bcomnes/async-neocities/compare/v0.0.5...v0.0.6) - 2020-02-10 - -### Commits - -- bugs [`c1d9973`](https://github.com/bcomnes/async-neocities/commit/c1d9973afef3abd7d6edfc5a6ae1c9d37f6cb34d) - -## [v0.0.5](https://github.com/bcomnes/async-neocities/compare/v0.0.4...v0.0.5) - 2020-02-10 - -### Commits - -- bugs [`e542111`](https://github.com/bcomnes/async-neocities/commit/e542111f3404ab923be3490e62ba16b4f6b66a70) - -## [v0.0.4](https://github.com/bcomnes/async-neocities/compare/v0.0.3...v0.0.4) - 2020-02-10 - -### Commits - -- bump version [`a3da5f7`](https://github.com/bcomnes/async-neocities/commit/a3da5f77cda15fb3e9ec5861b588f616d8b0055c) - -## [v0.0.3](https://github.com/bcomnes/async-neocities/compare/v0.0.2...v0.0.3) - 2020-02-10 - -### Commits - -- tmp releases [`16a6db4`](https://github.com/bcomnes/async-neocities/commit/16a6db49a06bebef89007b94e03dd34e6d17b298) - -## [v0.0.2](https://github.com/bcomnes/async-neocities/compare/v0.0.1...v0.0.2) - 2020-02-10 - -## v0.0.1 - 2020-02-10 - -### Commits - -- Init [`bb055ae`](https://github.com/bcomnes/async-neocities/commit/bb055ae8e76b0344acc929e8ffd3974d19144001) -- fix tests [`c294b52`](https://github.com/bcomnes/async-neocities/commit/c294b528a64a50638c4374a8782b177fe3634eb2) -- Init [`9ec8fb5`](https://github.com/bcomnes/async-neocities/commit/9ec8fb557ebf8578c9eb07dedffcb1b7eedbd3e6) diff --git a/node_modules/async-neocities/CODE_OF_CONDUCT.md 
b/node_modules/async-neocities/CODE_OF_CONDUCT.md deleted file mode 100644 index 9c93cac..0000000 --- a/node_modules/async-neocities/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,4 +0,0 @@ -# Code of conduct - -- This repo is governed as a dictatorship starting with the originator of the project. -- This is a malevolence free zone. diff --git a/node_modules/async-neocities/CONTRIBUTING.md b/node_modules/async-neocities/CONTRIBUTING.md deleted file mode 100644 index e5ca73a..0000000 --- a/node_modules/async-neocities/CONTRIBUTING.md +++ /dev/null @@ -1,26 +0,0 @@ -# Contributing - -## Releasing - -Changelog, and releasing is autmated with npm scripts. To create a release: - -- Ensure a clean working git workspace. -- Run `npm version {patch,minor,major}`. - - This wills update the version number and generate the changelog. -- Run `npm publish`. - - This will push your local git branch and tags to the default remote, perform a [gh-release](https://ghub.io/gh-release), and create an npm publication. - -## Guidelines - -- Patches, ideas and changes welcome. -- Fixes almost always welcome. -- Features sometimes welcome. - - Please open an issue to discuss the issue prior to spending lots of time on the problem. - - It may be rejected. - - If you don't want to wait around for the discussion to commence, and you really want to jump into the implementation work, be prepared for fork if the idea is respectfully declined. -- Try to stay within the style of the existing code. -- All tests must pass. -- Additional features or code paths must be tested. -- Aim for 100% coverage. -- Questions are welcome, however unless there is a official support contract established between the maintainers and the requester, support is not guaranteed. -- Contributors reserve the right to walk away from this project at any moment with or without notice. diff --git a/node_modules/async-neocities/LICENSE b/node_modules/async-neocities/LICENSE deleted file mode 100644 index 7524c61..0000000 --- a/node_modules/async-neocities/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Bret Comnes - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/async-neocities/README.md b/node_modules/async-neocities/README.md deleted file mode 100644 index aa360b8..0000000 --- a/node_modules/async-neocities/README.md +++ /dev/null @@ -1,245 +0,0 @@ -# async-neocities -[![Actions Status](https://github.com/bcomnes/async-neocities/workflows/tests/badge.svg)](https://github.com/bcomnes/async-neocities/actions) - -An api client for [neocities][nc] with an async/promise API and an efficient deploy algorithm. - -
- -```console -npm install async-neocities -``` - -## Usage - -``` js -const path = require('path') -const Neocities = require('async-neocities') - -async function deploySite () { - const token = await Neocities.getKey('sitename', 'password') - - const client = new Neocities(token) - - console.log(await client.list()) // site files - console.log(await client.info()) // site info - - return client.deploy(path.join(__dirname, './site-contents')) -} - -deploySite.then(info => { console.log('done deploying site!') }) - .catch(e => { throw e }) -``` - -## API - -### `Neocities = require('async-neocities')` - -Import the Neocities API client. - -### `apiKey = await Neocities.getKey(sitename, password, [opts])` - -Static class method that will get an API Key from a sitename and password. - -`opts` include: - -```js -{ - url: 'https://neocities.org' // Base URL to use for requests -} -``` - -### `client = new Neocities(apiKey, [opts])` - -Create a new API client for a given API key. - -`opts` include: - -```js -{ - url: 'https://neocities.org' // Base URL to use for requests -} -``` - -### `response = await client.upload(files)` - -Pass an array of objects with the `{ name, path }` pair to upload these files to neocities, where `name` is desired remote unix path on neocities and `path` is the local path on disk in whichever format the local operating system desires. - -A successful `response`: - -```js -{ - result: 'success', - message: 'your file(s) have been successfully uploaded' -} -``` - -### `response = await client.delete(filenames)` - -Pass an array of path strings to delete on neocities. The path strings should be the unix style path of the file you want to delete. - -A successful `response`: - -```js -{ result: 'success', message: 'file(s) have been deleted' } -``` - -### `response = await client.list([queries])` - -Get a list of files for your site. The optional `queries` object is passed through [qs][qs] and added to the request. - -Available queries: - -```js -{ - path // list the contents of a subdirectory on neocities -} -``` - -Example `responses`: - -```json -{ - "result": "success", - "files": [ - { - "path": "index.html", - "is_directory": false, - "size": 1023, - "updated_at": "Sat, 13 Feb 2016 03:04:00 -0000", - "sha1_hash": "c8aac06f343c962a24a7eb111aad739ff48b7fb1" - }, - { - "path": "not_found.html", - "is_directory": false, - "size": 271, - "updated_at": "Sat, 13 Feb 2016 03:04:00 -0000", - "sha1_hash": "cfdf0bda2557c322be78302da23c32fec72ffc0b" - }, - { - "path": "images", - "is_directory": true, - "updated_at": "Sat, 13 Feb 2016 03:04:00 -0000" - }, - { - "path": "images/cat.png", - "is_directory": false, - "size": 16793, - "updated_at": "Sat, 13 Feb 2016 03:04:00 -0000", - "sha1_hash": "41fe08fc0dd44e79f799d03ece903e62be25dc7d" - } - ] -} -``` - -With the `path` query: - -```json -{ - "result": "success", - "files": [ - { - "path": "images/cat.png", - "is_directory": false, - "size": 16793, - "updated_at": "Sat, 13 Feb 2016 03:04:00 -0000", - "sha1_hash": "41fe08fc0dd44e79f799d03ece903e62be25dc7d" - } - ] -} -``` - -### `response = await client.info([queries])` - -Get info about your or other sites. The optional `queries` object is passed through [qs][qs] and added to the request. 
- -Available queries: - -```js -{ - sitename // get info on a given sitename -} -``` - -Example `responses`: - -```json -{ - "result": "success", - "info": { - "sitename": "youpi", - "hits": 5072, - "created_at": "Sat, 29 Jun 2013 10:11:38 +0000", - "last_updated": "Tue, 23 Jul 2013 20:04:03 +0000", - "domain": null, - "tags": [] - } -} -``` - -### `stats = await client.deploy(directory, [opts])` - -Deploy a path to a `directory`, efficiently only uploading missing and changed files. Files are determined to be different by size, and sha1 hash, if the size is the same. - -`opts` include: - -```js -{ - cleanup: false // delete orphaned files on neocities that are not in the `directory` - statsCb: (stats) => {} -} -``` - -For an example of a stats handler, see [lib/stats-handler.js]('./lib/stats-handler.js'). - -### `client.get(endpoint, [quieries], [opts])` - -Low level GET request to a given `endpoint`. - -**NOTE**: The `/api/` prefix is automatically added: `/api/${endpoint}` so that must be omitted from `endpoint. - -The optional `queries` object is stringified to a querystring using [`qs`][qs]a and added to the request. - -`opts` includes: - -```js -{ - method: 'GET', - headers: { ...client.defaultHeaders, ...opts.headers }, -} -``` - -Note, that `opts` is passed internally to [`node-fetch`][nf] and you can include any options that work for that client here. - -### `client.post(endpoint, formEntries, [opts])` - -Low level POST request to a given `endpoint`. - -**NOTE**: The `/api/` prefix is automatically adeded: `/api/${endpoint}` so that must be omitted from `endpoint. - -Pass a `formEntries` array or iterator containing objects with `{name, value}` pairs to be sent with the POST request as [FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData). The [form-datat][fd] module is used internally. - -`opts` include: - -```js -{ - method: 'POST', - body: new FormData(), // Don't override this. - headers: { ...client.defafultHeaders, ...formHeaders, opts.headers } -} -``` - -Note, that `opts` is passed internally to [`node-fetch`][nf] and you can include any options that work for that client here. 
- -## See also - -- [Neocities API docs](https://neocities.org/api) -- [Official Node.js API client](https://github.com/neocities/neocities-node) - -## License - -MIT - -[qs]: https://ghub.io/qs -[nf]: https://ghub.io/node-fetch -[fd]: https://ghub.io/form-data -[nc]: https://neocities.org diff --git a/node_modules/async-neocities/index.js b/node_modules/async-neocities/index.js deleted file mode 100644 index 729c2fa..0000000 --- a/node_modules/async-neocities/index.js +++ /dev/null @@ -1,303 +0,0 @@ -const { handleResponse } = require('fetch-errors') -const { createReadStream } = require('fs') -const afw = require('async-folder-walker') -const FormData = require('form-data') -const assert = require('nanoassert') -const fetch = require('node-fetch') -const { URL } = require('url') -const qs = require('qs') -const os = require('os') - -const { neocitiesLocalDiff } = require('./lib/folder-diff') -const pkg = require('./package.json') -const SimpleTimer = require('./lib/timer') -const { getStreamLength, meterStream } = require('./lib/stream-meter') -const statsHandler = require('./lib/stats-handler') - -const defaultURL = 'https://neocities.org' - -// Progress API constants -const START = 'start' -const PROGRESS = 'progress' // progress updates -const STOP = 'stop' -const SKIP = 'skip' -// Progress stages -const INSPECTING = 'inspecting' -const DIFFING = 'diffing' -const APPLYING = 'applying' - -/** - * NeocitiesAPIClient class representing a neocities api client. - */ -class NeocitiesAPIClient { - /** - * getKey returns an apiKey from a sitename and password. - * @param {String} sitename username/sitename to log into. - * @param {String} password password to log in with. - * @param {Object} [opts] Options object. - * @param {Object} [opts.url=https://neocities.org] Base URL to request to. - * @return {Promise} An api key for the sitename.. - */ - static getKey (sitename, password, opts) { - assert(sitename, 'must pass sitename as first arg') - assert(typeof sitename === 'string', 'user arg must be a string') - assert(password, 'must pass a password as the second arg') - assert(typeof password, 'password arg must be a string') - - opts = Object.assign({ - url: defaultURL - }, opts) - - const baseURL = opts.url - delete opts.url - - const url = new URL('/api/key', baseURL) - url.username = sitename - url.password = password - return fetch(url, opts) - } - - static statsHandler (...args) { return statsHandler(...args) } - - /** - * Create an async-neocities api client. - * @param {string} apiKey An apiKey to make requests with. - * @param {Object} [opts] Options object. - * @param {Object} [opts.url=https://neocities.org] Base URL to make requests to. - * @return {Object} An api client instance. - */ - constructor (apiKey, opts) { - assert(apiKey, 'must pass apiKey as first argument') - assert(typeof apiKey === 'string', 'apiKey must be a string') - opts = Object.assign({ - url: defaultURL - }) - - this.url = opts.url - this.apiKey = apiKey - } - - get defaultHeaders () { - return { - Authorization: `Bearer ${this.apiKey}`, - Accept: 'application/json', - 'User-Agent': `async-neocities/${pkg.version} (${os.type()})` - } - } - - /** - * Generic GET request to neocities. - * @param {String} endpoint An endpoint path to GET request. - * @param {Object} [quieries] An object that gets added to the request in the form of a query string. - * @param {Object} [opts] Options object. - * @param {String} [opts.method=GET] The http method to use. 
- * @param {Object} [opts.headers] Headers to include in the request. - * @return {Object} The parsed JSON from the request response. - */ - get (endpoint, quieries, opts) { - assert(endpoint, 'must pass endpoint as first argument') - opts = Object.assign({ - method: 'GET' - }, opts) - opts.headers = Object.assign({}, this.defaultHeaders, opts.headers) - - let path = `/api/${endpoint}` - if (quieries) path += `?${qs.stringify(quieries)}` - - const url = new URL(path, this.url) - return fetch(url, opts) - } - - /** - * Low level POST request to neocities with FormData. - * @param {String} endpoint The endpoint to make the request to. - * @param {Array.<{name: String, value: String}>} formEntries Array of form entries. - * @param {Object} [opts] Options object. - * @param {String} [opts.method=POST] HTTP Method. - * @param {Object} [opts.headers] Additional headers to send. - * @return {Object} The parsed JSON response object. - */ - async post (endpoint, formEntries, opts) { - assert(endpoint, 'must pass endpoint as first argument') - assert(formEntries, 'must pass formEntries as second argument') - - function createForm () { - const form = new FormData() - for (const { name, value } of formEntries) { - form.append(name, value) - } - - return form - } - - opts = Object.assign({ - method: 'POST', - statsCb: () => {} - }, opts) - const statsCb = opts.statsCb - delete opts.statsCb - - const stats = { - totalBytes: await getStreamLength(createForm()), - bytesWritten: 0 - } - statsCb(stats) - - const form = createForm() - - opts.body = meterStream(form, bytesRead => { - stats.bytesWritten = bytesRead - statsCb(stats) - }) - - opts.headers = Object.assign( - {}, - this.defaultHeaders, - form.getHeaders(), - opts.headers) - - const url = new URL(`/api/${endpoint}`, this.url) - return fetch(url, opts) - } - - /** - * Upload files to neocities - */ - upload (files, opts = {}) { - opts = { - statsCb: () => {}, - ...opts - } - const formEntries = files.map(({ name, path }) => { - const streamCtor = (next) => next(createReadStream(path)) - streamCtor.path = path - return { - name, - value: streamCtor - } - }) - - return this.post('upload', formEntries, { statsCb: opts.statsCb }).then(handleResponse) - } - - /** - * delete files from your website - */ - delete (filenames, opts = {}) { - assert(filenames, 'filenames is a required first argument') - assert(Array.isArray(filenames), 'filenames argument must be an array of file paths in your website') - opts = { - statsCb: () => {}, - ...opts - } - - const formEntries = filenames.map(file => ({ - name: 'filenames[]', - value: file - })) - - return this.post('delete', formEntries, { statsCb: opts.statsCb }).then(handleResponse) - } - - list (queries) { - // args.path: Path to list - return this.get('list', queries).then(handleResponse) - } - - /** - * info returns info on your site, or optionally on a sitename querystrign - * @param {Object} args Querystring arguments to include (e.g. sitename) - * @return {Promise} Fetch request promise - */ - info (queries) { - // args.sitename: sitename to get info on - return this.get('info', queries).then(handleResponse) - } - - /** - * Deploy a directory to neocities, skipping already uploaded files and optionally cleaning orphaned files. - * @param {String} directory The path of the directory to deploy. - * @param {Object} opts Options object. - * @param {Boolean} opts.cleanup Boolean to delete orphaned files nor not. Defaults to false. 
- * @param {Boolean} opts.statsCb Get access to stat info before uploading is complete. - * @return {Promise} Promise containing stats about the deploy - */ - async deploy (directory, opts) { - opts = { - cleanup: false, // delete remote orphaned files - statsCb: () => {}, - ...opts - } - - const statsCb = opts.statsCb - const totalTime = new SimpleTimer(Date.now()) - - // INSPECTION STAGE - statsCb({ stage: INSPECTING, status: START }) - const [localFiles, remoteFiles] = await Promise.all([ - afw.allFiles(directory, { shaper: f => f }), - this.list().then(res => res.files) - ]) - statsCb({ stage: INSPECTING, status: STOP }) - - // DIFFING STAGE - statsCb({ stage: DIFFING, status: START }) - const { filesToUpload, filesToDelete, filesSkipped } = await neocitiesLocalDiff(remoteFiles, localFiles) - statsCb({ stage: DIFFING, status: STOP }) - - // APPLYING STAGE - if (filesToUpload.length === 0 && (!opts.cleanup || filesToDelete.length === 0)) { - statsCb({ stage: APPLYING, status: SKIP }) - return stats() - } - - statsCb({ stage: APPLYING, status: START }) - const work = [] - - if (filesToUpload.length > 0) { - const uploadJob = this.upload(filesToUpload, { - statsCb ({ totalBytes, bytesWritten }) { - statsCb({ - stage: APPLYING, - status: PROGRESS, - complete: false, - totalBytes, - bytesWritten, - get progress () { - return (this.bytesWritten / this.totalBytes) || 0 - } - }) - } - }).then((_) => { - statsCb({ - stage: APPLYING, - status: PROGRESS, - complete: true, - progress: 1.0 - }) - }) - work.push(uploadJob) - } - - if (opts.cleanup && filesToDelete.length > 0) { - work.push(this.delete(filesToDelete)) - } - - await Promise.all(work) - statsCb({ stage: APPLYING, status: STOP }) - - return stats() - - function stats () { - totalTime.stop() - return { - time: totalTime.elapsed, - filesToUpload, - filesToDelete, - filesSkipped - } - } - } -} - -module.exports = NeocitiesAPIClient diff --git a/node_modules/async-neocities/lib/folder-diff.js b/node_modules/async-neocities/lib/folder-diff.js deleted file mode 100644 index 62b3468..0000000 --- a/node_modules/async-neocities/lib/folder-diff.js +++ /dev/null @@ -1,179 +0,0 @@ -const crypto = require('crypto') -const util = require('util') -const fs = require('fs') - -const ppump = util.promisify(require('pump')) - -/** - * neocitiesLocalDiff returns an array of files to delete and update and some useful stats. - * @param {Array} neocitiesFiles Array of files returned from the neocities list api. - * @param {Array} localListing Array of files returned by a full data async-folder-walker run. - * @return {Promise} Object of filesToUpload, filesToDelete and filesSkipped. 
- */ -async function neocitiesLocalDiff (neocitiesFiles, localListing) { - const localIndex = {} - const ncIndex = {} - - const neoCitiesFiltered = neocitiesFiles.filter(f => !f.is_directory) - neoCitiesFiltered.forEach(f => { ncIndex[f.path] = f }) // index - const ncFiles = new Set(neoCitiesFiltered.map(f => f.path)) // shape - - const localListingFiltered = localListing.filter(f => !f.stat.isDirectory()) // files only - localListingFiltered.forEach(f => { localIndex[forceUnixRelname(f.relname)] = f }) // index - const localFiles = new Set(localListingFiltered.map(f => forceUnixRelname(f.relname))) // shape - - const filesToAdd = difference(localFiles, ncFiles) - const filesToDelete = difference(ncFiles, localFiles) - - const maybeUpdate = intersection(localFiles, ncFiles) - const skipped = new Set() - - for (const p of maybeUpdate) { - const local = localIndex[p] - const remote = ncIndex[p] - - if (local.stat.size !== remote.size) { filesToAdd.add(p); continue } - - const localSha1 = await sha1FromPath(local.filepath) - if (localSha1 !== remote.sha1_hash) { filesToAdd.add(p); continue } - - skipped.add(p) - } - - return { - filesToUpload: Array.from(filesToAdd).map(p => ({ - name: forceUnixRelname(localIndex[p].relname), - path: localIndex[p].filepath - })), - filesToDelete: Array.from(filesToDelete).map(p => ncIndex[p].path), - filesSkipped: Array.from(skipped).map(p => localIndex[p]) - } -} - -module.exports = { - neocitiesLocalDiff -} - -/** - * sha1FromPath returns a sha1 hex from a path - * @param {String} p string of the path of the file to hash - * @return {Promise} the hex representation of the sha1 - */ -async function sha1FromPath (p) { - const rs = fs.createReadStream(p) - const hash = crypto.createHash('sha1') - - await ppump(rs, hash) - - return hash.digest('hex') -} - -// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set#Implementing_basic_set_operations - -/** - * difference returnss the difference betwen setA and setB. - * @param {Set} setA LHS set - * @param {Set} setB RHS set - * @return {Set} The difference Set - */ -function difference (setA, setB) { - const _difference = new Set(setA) - for (const elem of setB) { - _difference.delete(elem) - } - return _difference -} - -/** - * intersection returns the interesction between setA and setB. - * @param {Set} setA setA LHS set - * @param {Set} setB setB RHS set - * @return {Set} The intersection set between setA and setB. - */ -function intersection (setA, setB) { - const _intersection = new Set() - for (const elem of setB) { - if (setA.has(elem)) { - _intersection.add(elem) - } - } - return _intersection -} - -/** - * forceUnixRelname forces a OS dependent path to a unix style path. - * @param {String} relname String path to convert to unix style. 
- * @return {String} The unix variant of the path - */ -function forceUnixRelname (relname) { - return relname.split(relname.sep).join('/') -} - -/** - * Example of neocitiesFiles - */ -// [ -// { -// path: 'img', -// is_directory: true, -// updated_at: 'Thu, 21 Nov 2019 04:06:17 -0000' -// }, -// { -// path: 'index.html', -// is_directory: false, -// size: 1094, -// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', -// sha1_hash: '7f15617e87d83218223662340f4052d9bb9d096d' -// }, -// { -// path: 'neocities.png', -// is_directory: false, -// size: 13232, -// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', -// sha1_hash: 'fd2ee41b1922a39a716cacb88c323d613b0955e4' -// }, -// { -// path: 'not_found.html', -// is_directory: false, -// size: 347, -// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', -// sha1_hash: 'd7f004e9d3b2eaaa8827f741356f1122dc9eb030' -// }, -// { -// path: 'style.css', -// is_directory: false, -// size: 298, -// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', -// sha1_hash: 'e516457acdb0d00710ab62cc257109ef67209ce8' -// } -// ] - -/** - * Example of localListing - */ -// [{ -// root: '/Users/bret/repos/async-folder-walker/fixtures', -// filepath: '/Users/bret/repos/async-folder-walker/fixtures/sub-folder/sub-sub-folder', -// stat: Stats { -// dev: 16777220, -// mode: 16877, -// nlink: 3, -// uid: 501, -// gid: 20, -// rdev: 0, -// blksize: 4096, -// ino: 30244023, -// size: 96, -// blocks: 0, -// atimeMs: 1574381262779.8396, -// mtimeMs: 1574380914743.5474, -// ctimeMs: 1574380914743.5474, -// birthtimeMs: 1574380905232.5996, -// atime: 2019-11-22T00:07:42.780Z, -// mtime: 2019-11-22T00:01:54.744Z, -// ctime: 2019-11-22T00:01:54.744Z, -// birthtime: 2019-11-22T00:01:45.233Z -// }, -// relname: 'sub-folder/sub-sub-folder', -// basename: 'sub-sub-folder' -// }] diff --git a/node_modules/async-neocities/lib/folder-diff.test.js b/node_modules/async-neocities/lib/folder-diff.test.js deleted file mode 100644 index 02d2df8..0000000 --- a/node_modules/async-neocities/lib/folder-diff.test.js +++ /dev/null @@ -1,65 +0,0 @@ -const afw = require('async-folder-walker') -const path = require('path') -const tap = require('tap') - -const { neocitiesLocalDiff } = require('./folder-diff') - -const remoteFiles = [ - { - path: 'img', - is_directory: true, - updated_at: 'Thu, 21 Nov 2019 04:06:17 -0000' - }, - { - path: 'index.html', - is_directory: false, - size: 1094, - updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', - sha1_hash: '7f15617e87d83218223662340f4052d9bb9d096d' - }, - { - path: 'neocities.png', - is_directory: false, - size: 13232, - updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', - sha1_hash: 'fd2ee41b1922a39a716cacb88c323d613b0955e4' - }, - { - path: 'not_found.html', - is_directory: false, - size: 347, - updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', - sha1_hash: 'd7f004e9d3b2eaaa8827f741356f1122dc9eb030' - }, - { - path: 'style.css', - is_directory: false, - size: 298, - updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000', - sha1_hash: 'e516457acdb0d00710ab62cc257109ef67209ce8' - } -] - -tap.test('test differ', async t => { - const localFiles = await afw.allFiles(path.join(__dirname, '../fixtures'), { - shaper: f => f - }) - - const { filesToUpload, filesToDelete, filesSkipped } = await neocitiesLocalDiff(remoteFiles, localFiles) - - t.true(['tootzzz.png', 'toot.gif', 'cat.png'].every(path => { - const found = filesToUpload.find(ftu => ftu.name === path) - t.ok(found.path && found.name, 'each file to upload has a name and path') - return found - }), 'every file to upload is included') - - 
t.deepEqual(filesToDelete, [ - 'not_found.html', - 'style.css' - ], 'filesToDelete returned correctly') - - t.true(['neocities.png'].every(path => { - const found = filesSkipped.find(fs => fs.relname === path) - return found - }), 'every file skipped is included') -}) diff --git a/node_modules/async-neocities/lib/stats-handler.js b/node_modules/async-neocities/lib/stats-handler.js deleted file mode 100644 index 0e1e41d..0000000 --- a/node_modules/async-neocities/lib/stats-handler.js +++ /dev/null @@ -1,60 +0,0 @@ -const prettyBytes = require('pretty-bytes') - -// Progress API constants -const START = 'start' -const PROGRESS = 'progress' // progress updates -const STOP = 'stop' -const SKIP = 'skip' - -function statsHandler (opts = {}) { - let progressInterval - const lastStats = {} - - return (stats) => { - switch (stats.status) { - case START: { - console.log(`Starting ${stats.stage} stage...`) - break - } - case STOP: { - console.log(`Finished ${stats.stage} stage.`) - break - } - case SKIP: { - console.log(`Skipping ${stats.stage} stage.`) - break - } - case PROGRESS: { - progressHandler(stats) - break - } - default: { - } - } - } - - function progressHandler (stats) { - Object.assign(lastStats, stats) - if (!stats.complete && stats.progress < 1) { - if (!progressInterval) { - progressInterval = setInterval(logProgress, 500, lastStats) - logProgress(lastStats) - } - } else { - if (progressInterval) { - clearInterval(progressInterval) - logProgress(lastStats) - } - } - } - - function logProgress (stats) { - let logLine = `Stage ${stats.stage}: ${(stats.progress * 100).toFixed(2)}%` - if (stats.bytesWritten != null && stats.totalBytes != null) { - logLine = logLine + ` (${prettyBytes(stats.bytesWritten)} / ${prettyBytes(stats.totalBytes)})` - } - console.log(logLine) - } -} - -module.exports = statsHandler diff --git a/node_modules/async-neocities/lib/stream-meter.js b/node_modules/async-neocities/lib/stream-meter.js deleted file mode 100644 index ee22c70..0000000 --- a/node_modules/async-neocities/lib/stream-meter.js +++ /dev/null @@ -1,38 +0,0 @@ -const { Writable, Transform } = require('streamx') -const pump = require('pump') -const pumpify = require('pumpify') - -function getStreamLength (readable) { - let length = 0 - - const dummyLoad = new Writable({ - write (data, cb) { - length += data.length - cb(null) - } - }) - - return new Promise((resolve, reject) => { - pump(readable, dummyLoad, (err) => { - if (err) return reject(err) - resolve(length) - }) - }) -} - -function meterStream (readable, statsCb) { - let bytesRead = 0 - const meter = new Transform({ - transform (data, cb) { - bytesRead += data.length - statsCb(bytesRead) - cb(null, data) - } - }) - return pumpify(readable, meter) -} - -module.exports = { - getStreamLength, - meterStream -} diff --git a/node_modules/async-neocities/lib/timer.js b/node_modules/async-neocities/lib/timer.js deleted file mode 100644 index cd78fd4..0000000 --- a/node_modules/async-neocities/lib/timer.js +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Simple timer lets you record start and stop times, with an elapsed time getter. 
- */ -class SimpleTimer { - constructor (startTime) { - this.start = startTime || Date.now() - this.end = null - this.stopped = false - } - - get elapsed () { - if (this.stopped) { - return this.end - this.start - } else { - return Date.now() - this.start - } - } - - stop () { - if (this.stopped) return - this.stopped = true - this.end = Date.now() - } - - toString () { - return this.elapsed - } - - toJSON () { - return this.elapsed - } -} - -module.exports = SimpleTimer diff --git a/node_modules/async-neocities/package.json b/node_modules/async-neocities/package.json deleted file mode 100644 index 8839c98..0000000 --- a/node_modules/async-neocities/package.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "_from": "async-neocities@1.1.6", - "_id": "async-neocities@1.1.6", - "_inBundle": false, - "_integrity": "sha512-q5fTVttBaN9znGxqxxDAh/ks+bZngIJPu6zPS7nlbJLC9NnOhrcP5Mu0VntxgEBtAuaExyI6uH/C+CxKSW0LeQ==", - "_location": "/async-neocities", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "async-neocities@1.1.6", - "name": "async-neocities", - "escapedName": "async-neocities", - "rawSpec": "1.1.6", - "saveSpec": null, - "fetchSpec": "1.1.6" - }, - "_requiredBy": [ - "/" - ], - "_resolved": "https://registry.npmjs.org/async-neocities/-/async-neocities-1.1.6.tgz", - "_shasum": "405b45565ccbe9c4ea56e65552ae9c48c20a0309", - "_spec": "async-neocities@1.1.6", - "_where": "/Users/bret/repos/deploy-to-neocities", - "author": { - "name": "Bret Comnes", - "email": "bcomnes@gmail.com", - "url": "https://bret.io" - }, - "bugs": { - "url": "https://github.com/bcomnes/async-neocities/issues" - }, - "bundleDependencies": false, - "dependencies": { - "async-folder-walker": "^2.0.1", - "fetch-errors": "^2.0.1", - "form-data": "^3.0.0", - "nanoassert": "^2.0.0", - "node-fetch": "^2.6.0", - "pretty-bytes": "^5.3.0", - "pump": "^3.0.0", - "pumpify": "^2.0.1", - "qs": "^6.9.1", - "streamx": "^2.6.0" - }, - "deprecated": false, - "description": "WIP - nothing to see here", - "devDependencies": { - "auto-changelog": "^1.16.2", - "dependency-check": "^4.1.0", - "gh-release": "^3.5.0", - "npm-run-all": "^4.1.5", - "standard": "^13.1.0", - "tap": "^14.10.2" - }, - "homepage": "https://github.com/bcomnes/async-neocities", - "keywords": [ - "neocities", - "async", - "api client", - "static hosting" - ], - "license": "MIT", - "main": "index.js", - "name": "async-neocities", - "repository": { - "type": "git", - "url": "git+https://github.com/bcomnes/async-neocities.git" - }, - "scripts": { - "prepublishOnly": "git push --follow-tags && gh-release", - "test": "run-s test:*", - "test:deps": "dependency-check . 
--no-dev --no-peer", - "test:standard": "standard", - "test:tape": "tap", - "version": "auto-changelog -p --template keepachangelog auto-changelog --breaking-pattern 'BREAKING:' && git add CHANGELOG.md" - }, - "standard": { - "ignore": [ - "dist" - ] - }, - "version": "1.1.6" -} diff --git a/node_modules/async-neocities/test.js b/node_modules/async-neocities/test.js deleted file mode 100644 index a2d2d1e..0000000 --- a/node_modules/async-neocities/test.js +++ /dev/null @@ -1,127 +0,0 @@ -const tap = require('tap') - -const { readFileSync } = require('fs') -const { resolve } = require('path') -const NeocitiesAPIClient = require('.') -const statsHanlder = require('./lib/stats-handler') - -let token = process.env.NEOCITIES_API_TOKEN -let fakeToken = false - -if (!token) { - try { - const config = JSON.parse(readFileSync(resolve(__dirname, 'config.json'))) - token = config.token - tap.test('token loaded', async t => { - t.ok(token) - }) - } catch (e) { - console.warn('error loading config.json') - console.warn('using fake token, live tests disabled') - fakeToken = true - token = '123456' - } -} - -tap.test('basic client api', async t => { - const client = new NeocitiesAPIClient(token) - - t.ok(client.info, 'info method available') - t.ok(client.list, 'list method available') - t.ok(client.get, 'get method available') - t.ok(client.post, 'post method available') -}) - -if (!fakeToken) { - tap.test('can get info about site', async t => { - const client = new NeocitiesAPIClient(token) - - const info = await client.info() - // console.log(info) - t.equal(info.result, 'success', 'info requesst successfull') - const list = await client.list() - // console.log(list) - t.equal(list.result, 'success', 'list result successfull') - }) - - // test('form data works the way I think', t => { - // const form = new FormData(); - // const p = resolve(__dirname, 'package.json'); - // form.append('package.json', next => next(createReadStream(p))); - // - // const concatStream = concat((data) => { - // console.log(data); - // t.end(); - // }); - // - // form.on('error', (err) => { - // t.error(err); - // }); - // form.pipe(concatStream); - // }); - - tap.test('can upload and delete files', async t => { - const client = new NeocitiesAPIClient(token) - - const uploadResults = await client.upload([ - { - name: 'toot.gif', - path: resolve(__dirname, 'fixtures/toot.gif') - }, - { - name: 'img/tootzzz.png', - path: resolve(__dirname, 'fixtures/tootzzz.png') - } - ]) - - // console.log(uploadResults) - t.equal(uploadResults.result, 'success', 'list result successfull') - - const deleteResults = await client.delete([ - 'toot.gif', - 'img/tootzzz.png' - ]) - // console.log(deleteResults) - t.equal(deleteResults.result, 'success', 'list result successfull') - }) - - tap.test('can deploy folders', async t => { - const client = new NeocitiesAPIClient(token) - - const deployStats = await client.deploy( - resolve(__dirname, 'fixtures'), - { - statsCb: statsHanlder(), - cleanup: false - } - ) - - t.ok(deployStats) - - // console.dir(deployStats, { depth: 99, colors: true }) - - const redeployStats = await client.deploy( - resolve(__dirname, 'fixtures'), - { - statsCb: statsHanlder(), - cleanup: false - } - ) - - t.ok(redeployStats) - - // console.dir(redeployStats, { depth: 99, colors: true }) - - const cleanupStats = await client.deploy( - resolve(__dirname, 'fixtures/empty'), - { - statsCb: statsHanlder(), - cleanup: true - } - ) - - t.ok(cleanupStats) - - // console.dir(cleanupStats, { depth: 99, colors: true }) - }) -} diff 
--git a/node_modules/asynckit/LICENSE b/node_modules/asynckit/LICENSE deleted file mode 100644 index c9eca5d..0000000 --- a/node_modules/asynckit/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Alex Indigo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/asynckit/README.md b/node_modules/asynckit/README.md deleted file mode 100644 index ddcc7e6..0000000 --- a/node_modules/asynckit/README.md +++ /dev/null @@ -1,233 +0,0 @@ -# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit) - -Minimal async jobs utility library, with streams support. - -[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit) -[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit) -[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit) - -[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master) -[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit) -[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit) - - - -AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects. -Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method. - -It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators. - -| compression | size | -| :----------------- | -------: | -| asynckit.js | 12.34 kB | -| asynckit.min.js | 4.11 kB | -| asynckit.min.js.gz | 1.47 kB | - - -## Install - -```sh -$ npm install --save asynckit -``` - -## Examples - -### Parallel Jobs - -Runs iterator over provided array in parallel. Stores output in the `result` array, -on the matching positions. 
In unlikely event of an error from one of the jobs, -will terminate rest of the active jobs (if abort function is provided) -and return error along with salvaged data to the main callback function. - -#### Input Array - -```javascript -var parallel = require('asynckit').parallel - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] - , target = [] - ; - -parallel(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// async job accepts one element from the array -// and a callback function -function asyncJob(item, cb) -{ - // different delays (in ms) per item - var delay = item * 25; - - // pretend different jobs take different time to finish - // and not in consequential order - var timeoutId = setTimeout(function() { - target.push(item); - cb(null, item * 2); - }, delay); - - // allow to cancel "leftover" jobs upon error - // return function, invoking of which will abort this job - return clearTimeout.bind(null, timeoutId); -} -``` - -More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js). - -#### Input Object - -Also it supports named jobs, listed via object. - -```javascript -var parallel = require('asynckit/parallel') - , assert = require('assert') - ; - -var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } - , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } - , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] - , expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ] - , target = [] - , keys = [] - ; - -parallel(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); - assert.deepEqual(keys, expectedKeys); -}); - -// supports full value, key, callback (shortcut) interface -function asyncJob(item, key, cb) -{ - // different delays (in ms) per item - var delay = item * 25; - - // pretend different jobs take different time to finish - // and not in consequential order - var timeoutId = setTimeout(function() { - keys.push(key); - target.push(item); - cb(null, item * 2); - }, delay); - - // allow to cancel "leftover" jobs upon error - // return function, invoking of which will abort this job - return clearTimeout.bind(null, timeoutId); -} -``` - -More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js). - -### Serial Jobs - -Runs iterator over provided array sequentially. Stores output in the `result` array, -on the matching positions. In unlikely event of an error from one of the jobs, -will not proceed to the rest of the items in the list -and return error along with salvaged data to the main callback function. 
- -#### Input Array - -```javascript -var serial = require('asynckit/serial') - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] - , target = [] - ; - -serial(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// extended interface (item, key, callback) -// also supported for arrays -function asyncJob(item, key, cb) -{ - target.push(key); - - // it will be automatically made async - // even it iterator "returns" in the same event loop - cb(null, item * 2); -} -``` - -More examples could be found in [test/test-serial-array.js](test/test-serial-array.js). - -#### Input Object - -Also it supports named jobs, listed via object. - -```javascript -var serial = require('asynckit').serial - , assert = require('assert') - ; - -var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] - , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] - , target = [] - ; - -var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } - , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } - , expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ] - , target = [] - ; - - -serial(source, asyncJob, function(err, result) -{ - assert.deepEqual(result, expectedResult); - assert.deepEqual(target, expectedTarget); -}); - -// shortcut interface (item, callback) -// works for object as well as for the arrays -function asyncJob(item, cb) -{ - target.push(item); - - // it will be automatically made async - // even it iterator "returns" in the same event loop - cb(null, item * 2); -} -``` - -More examples could be found in [test/test-serial-object.js](test/test-serial-object.js). - -_Note: Since _object_ is an _unordered_ collection of properties, -it may produce unexpected results with sequential iterations. -Whenever order of the jobs' execution is important please use `serialOrdered` method._ - -### Ordered Serial Iterations - -TBD - -For example [compare-property](compare-property) package. - -### Streaming interface - -TBD - -## Want to Know More? - -More examples can be found in [test folder](test/). - -Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions. - -## License - -AsyncKit is licensed under the MIT license. 
diff --git a/node_modules/asynckit/bench.js b/node_modules/asynckit/bench.js deleted file mode 100644 index c612f1a..0000000 --- a/node_modules/asynckit/bench.js +++ /dev/null @@ -1,76 +0,0 @@ -/* eslint no-console: "off" */ - -var asynckit = require('./') - , async = require('async') - , assert = require('assert') - , expected = 0 - ; - -var Benchmark = require('benchmark'); -var suite = new Benchmark.Suite; - -var source = []; -for (var z = 1; z < 100; z++) -{ - source.push(z); - expected += z; -} - -suite -// add tests - -.add('async.map', function(deferred) -{ - var total = 0; - - async.map(source, - function(i, cb) - { - setImmediate(function() - { - total += i; - cb(null, total); - }); - }, - function(err, result) - { - assert.ifError(err); - assert.equal(result[result.length - 1], expected); - deferred.resolve(); - }); -}, {'defer': true}) - - -.add('asynckit.parallel', function(deferred) -{ - var total = 0; - - asynckit.parallel(source, - function(i, cb) - { - setImmediate(function() - { - total += i; - cb(null, total); - }); - }, - function(err, result) - { - assert.ifError(err); - assert.equal(result[result.length - 1], expected); - deferred.resolve(); - }); -}, {'defer': true}) - - -// add listeners -.on('cycle', function(ev) -{ - console.log(String(ev.target)); -}) -.on('complete', function() -{ - console.log('Fastest is ' + this.filter('fastest').map('name')); -}) -// run async -.run({ 'async': true }); diff --git a/node_modules/asynckit/index.js b/node_modules/asynckit/index.js deleted file mode 100644 index 455f945..0000000 --- a/node_modules/asynckit/index.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = -{ - parallel : require('./parallel.js'), - serial : require('./serial.js'), - serialOrdered : require('./serialOrdered.js') -}; diff --git a/node_modules/asynckit/lib/abort.js b/node_modules/asynckit/lib/abort.js deleted file mode 100644 index 114367e..0000000 --- a/node_modules/asynckit/lib/abort.js +++ /dev/null @@ -1,29 +0,0 @@ -// API -module.exports = abort; - -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); - - // reset leftover jobs - state.jobs = {}; -} - -/** - * Cleans up leftover job by invoking abort function for the provided job id - * - * @this state - * @param {string|number} key - job id to abort - */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); - } -} diff --git a/node_modules/asynckit/lib/async.js b/node_modules/asynckit/lib/async.js deleted file mode 100644 index 7f1288a..0000000 --- a/node_modules/asynckit/lib/async.js +++ /dev/null @@ -1,34 +0,0 @@ -var defer = require('./defer.js'); - -// API -module.exports = async; - -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented callback - */ -function async(callback) -{ - var isAsync = false; - - // check if async happened - defer(function() { isAsync = true; }); - - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); - } - }; -} diff --git a/node_modules/asynckit/lib/defer.js b/node_modules/asynckit/lib/defer.js deleted file mode 100644 index b67110c..0000000 --- a/node_modules/asynckit/lib/defer.js +++ /dev/null @@ -1,26 +0,0 @@ -module.exports = defer; - -/** - * Runs provided function on 
next iteration of the event loop - * - * @param {function} fn - function to run - */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); - - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); - } -} diff --git a/node_modules/asynckit/lib/iterate.js b/node_modules/asynckit/lib/iterate.js deleted file mode 100644 index 5d2839a..0000000 --- a/node_modules/asynckit/lib/iterate.js +++ /dev/null @@ -1,75 +0,0 @@ -var async = require('./async.js') - , abort = require('./abort.js') - ; - -// API -module.exports = iterate; - -/** - * Iterates over each job object - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed - */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; - - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; - } - - // clean up jobs - delete state.jobs[key]; - - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); - } - else - { - state.results[key] = output; - } - - // return salvaged results - callback(error, state.results); - }); -} - -/** - * Runs iterator over provided job element - * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else - */ -function runJob(iterator, key, item, callback) -{ - var aborter; - - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); - } - - return aborter; -} diff --git a/node_modules/asynckit/lib/readable_asynckit.js b/node_modules/asynckit/lib/readable_asynckit.js deleted file mode 100644 index 78ad240..0000000 --- a/node_modules/asynckit/lib/readable_asynckit.js +++ /dev/null @@ -1,91 +0,0 @@ -var streamify = require('./streamify.js') - , defer = require('./defer.js') - ; - -// API -module.exports = ReadableAsyncKit; - -/** - * Base constructor for all streams - * used to hold properties/methods - */ -function ReadableAsyncKit() -{ - ReadableAsyncKit.super_.apply(this, arguments); - - // list of active jobs - this.jobs = {}; - - // add stream methods - this.destroy = destroy; - this._start = _start; - this._read = _read; -} - -/** - * Destroys readable stream, - * by aborting outstanding jobs - * - * @returns {void} - */ -function destroy() -{ - if (this.destroyed) - { - return; - } - - this.destroyed = true; - - if (typeof this.terminator == 'function') - { - this.terminator(); - } -} - -/** - * Starts provided jobs in async manner - * - * @private - */ -function _start() -{ - // first argument – runner function - var runner = arguments[0] - // take away first argument - , args = Array.prototype.slice.call(arguments, 1) - // second argument - 
input data - , input = args[0] - // last argument - result callback - , endCb = streamify.callback.call(this, args[args.length - 1]) - ; - - args[args.length - 1] = endCb; - // third argument - iterator - args[1] = streamify.iterator.call(this, args[1]); - - // allow time for proper setup - defer(function() - { - if (!this.destroyed) - { - this.terminator = runner.apply(null, args); - } - else - { - endCb(null, Array.isArray(input) ? [] : {}); - } - }.bind(this)); -} - - -/** - * Implement _read to comply with Readable streams - * Doesn't really make sense for flowing object mode - * - * @private - */ -function _read() -{ - -} diff --git a/node_modules/asynckit/lib/readable_parallel.js b/node_modules/asynckit/lib/readable_parallel.js deleted file mode 100644 index 5d2929f..0000000 --- a/node_modules/asynckit/lib/readable_parallel.js +++ /dev/null @@ -1,25 +0,0 @@ -var parallel = require('../parallel.js'); - -// API -module.exports = ReadableParallel; - -/** - * Streaming wrapper to `asynckit.parallel` - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {stream.Readable#} - */ -function ReadableParallel(list, iterator, callback) -{ - if (!(this instanceof ReadableParallel)) - { - return new ReadableParallel(list, iterator, callback); - } - - // turn on object mode - ReadableParallel.super_.call(this, {objectMode: true}); - - this._start(parallel, list, iterator, callback); -} diff --git a/node_modules/asynckit/lib/readable_serial.js b/node_modules/asynckit/lib/readable_serial.js deleted file mode 100644 index 7822698..0000000 --- a/node_modules/asynckit/lib/readable_serial.js +++ /dev/null @@ -1,25 +0,0 @@ -var serial = require('../serial.js'); - -// API -module.exports = ReadableSerial; - -/** - * Streaming wrapper to `asynckit.serial` - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {stream.Readable#} - */ -function ReadableSerial(list, iterator, callback) -{ - if (!(this instanceof ReadableSerial)) - { - return new ReadableSerial(list, iterator, callback); - } - - // turn on object mode - ReadableSerial.super_.call(this, {objectMode: true}); - - this._start(serial, list, iterator, callback); -} diff --git a/node_modules/asynckit/lib/readable_serial_ordered.js b/node_modules/asynckit/lib/readable_serial_ordered.js deleted file mode 100644 index 3de89c4..0000000 --- a/node_modules/asynckit/lib/readable_serial_ordered.js +++ /dev/null @@ -1,29 +0,0 @@ -var serialOrdered = require('../serialOrdered.js'); - -// API -module.exports = ReadableSerialOrdered; -// expose sort helpers -module.exports.ascending = serialOrdered.ascending; -module.exports.descending = serialOrdered.descending; - -/** - * Streaming wrapper to `asynckit.serialOrdered` - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {stream.Readable#} - */ -function ReadableSerialOrdered(list, iterator, sortMethod, callback) -{ - if (!(this instanceof ReadableSerialOrdered)) - { - return new ReadableSerialOrdered(list, iterator, sortMethod, callback); - } - - // turn on object mode - 
ReadableSerialOrdered.super_.call(this, {objectMode: true}); - - this._start(serialOrdered, list, iterator, sortMethod, callback); -} diff --git a/node_modules/asynckit/lib/state.js b/node_modules/asynckit/lib/state.js deleted file mode 100644 index cbea7ad..0000000 --- a/node_modules/asynckit/lib/state.js +++ /dev/null @@ -1,37 +0,0 @@ -// API -module.exports = state; - -/** - * Creates initial state object - * for iteration over list - * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object - */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; - - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); - } - - return initState; -} diff --git a/node_modules/asynckit/lib/streamify.js b/node_modules/asynckit/lib/streamify.js deleted file mode 100644 index f56a1c9..0000000 --- a/node_modules/asynckit/lib/streamify.js +++ /dev/null @@ -1,141 +0,0 @@ -var async = require('./async.js'); - -// API -module.exports = { - iterator: wrapIterator, - callback: wrapCallback -}; - -/** - * Wraps iterators with long signature - * - * @this ReadableAsyncKit# - * @param {function} iterator - function to wrap - * @returns {function} - wrapped function - */ -function wrapIterator(iterator) -{ - var stream = this; - - return function(item, key, cb) - { - var aborter - , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key)) - ; - - stream.jobs[key] = wrappedCb; - - // it's either shortcut (item, cb) - if (iterator.length == 2) - { - aborter = iterator(item, wrappedCb); - } - // or long format (item, key, cb) - else - { - aborter = iterator(item, key, wrappedCb); - } - - return aborter; - }; -} - -/** - * Wraps provided callback function - * allowing to execute snitch function before - * real callback - * - * @this ReadableAsyncKit# - * @param {function} callback - function to wrap - * @returns {function} - wrapped function - */ -function wrapCallback(callback) -{ - var stream = this; - - var wrapped = function(error, result) - { - return finisher.call(stream, error, result, callback); - }; - - return wrapped; -} - -/** - * Wraps provided iterator callback function - * makes sure snitch only called once, - * but passes secondary calls to the original callback - * - * @this ReadableAsyncKit# - * @param {function} callback - callback to wrap - * @param {number|string} key - iteration key - * @returns {function} wrapped callback - */ -function wrapIteratorCallback(callback, key) -{ - var stream = this; - - return function(error, output) - { - // don't repeat yourself - if (!(key in stream.jobs)) - { - callback(error, output); - return; - } - - // clean up jobs - delete stream.jobs[key]; - - return streamer.call(stream, error, {key: key, value: output}, callback); - }; -} - -/** - * Stream wrapper for iterator callback - * - * @this ReadableAsyncKit# - * @param {mixed} error - error response - * @param {mixed} output - iterator output - * @param {function} callback - callback that expects iterator results - */ -function streamer(error, output, callback) -{ 
- if (error && !this.error) - { - this.error = error; - this.pause(); - this.emit('error', error); - // send back value only, as expected - callback(error, output && output.value); - return; - } - - // stream stuff - this.push(output); - - // back to original track - // send back value only, as expected - callback(error, output && output.value); -} - -/** - * Stream wrapper for finishing callback - * - * @this ReadableAsyncKit# - * @param {mixed} error - error response - * @param {mixed} output - iterator output - * @param {function} callback - callback that expects final results - */ -function finisher(error, output, callback) -{ - // signal end of the stream - // only for successfully finished streams - if (!error) - { - this.push(null); - } - - // back to original track - callback(error, output); -} diff --git a/node_modules/asynckit/lib/terminator.js b/node_modules/asynckit/lib/terminator.js deleted file mode 100644 index d6eb992..0000000 --- a/node_modules/asynckit/lib/terminator.js +++ /dev/null @@ -1,29 +0,0 @@ -var abort = require('./abort.js') - , async = require('./async.js') - ; - -// API -module.exports = terminator; - -/** - * Terminates jobs in the attached state context - * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination - */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; - } - - // fast forward iteration index - this.index = this.size; - - // abort jobs - abort(this); - - // send back results we have so far - async(callback)(null, this.results); -} diff --git a/node_modules/asynckit/package.json b/node_modules/asynckit/package.json deleted file mode 100644 index 4fc6fc5..0000000 --- a/node_modules/asynckit/package.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "_from": "asynckit@^0.4.0", - "_id": "asynckit@0.4.0", - "_inBundle": false, - "_integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "_location": "/asynckit", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "asynckit@^0.4.0", - "name": "asynckit", - "escapedName": "asynckit", - "rawSpec": "^0.4.0", - "saveSpec": null, - "fetchSpec": "^0.4.0" - }, - "_requiredBy": [ - "/form-data", - "/request/form-data" - ], - "_resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "_shasum": "c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79", - "_spec": "asynckit@^0.4.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/form-data", - "author": { - "name": "Alex Indigo", - "email": "iam@alexindigo.com" - }, - "bugs": { - "url": "https://github.com/alexindigo/asynckit/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "Minimal async jobs utility library, with streams support", - "devDependencies": { - "browserify": "^13.0.0", - "browserify-istanbul": "^2.0.0", - "coveralls": "^2.11.9", - "eslint": "^2.9.0", - "istanbul": "^0.4.3", - "obake": "^0.1.2", - "phantomjs-prebuilt": "^2.1.7", - "pre-commit": "^1.1.3", - "reamde": "^1.1.0", - "rimraf": "^2.5.2", - "size-table": "^0.2.0", - "tap-spec": "^4.1.1", - "tape": "^4.5.1" - }, - "homepage": "https://github.com/alexindigo/asynckit#readme", - "keywords": [ - "async", - "jobs", - "parallel", - "serial", - "iterator", - "array", - "object", - "stream", - "destroy", - "terminate", - "abort" - ], - "license": "MIT", - "main": "index.js", - "name": "asynckit", - "pre-commit": [ - "clean", - "lint", - "test", - "browser", - "report", - "size" - ], - "repository": { - "type": "git", - 
"url": "git+https://github.com/alexindigo/asynckit.git" - }, - "scripts": { - "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec", - "clean": "rimraf coverage", - "debug": "tape test/test-*.js", - "lint": "eslint *.js lib/*.js test/*.js", - "report": "istanbul report", - "size": "browserify index.js | size-table asynckit", - "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec", - "win-test": "tape test/test-*.js" - }, - "version": "0.4.0" -} diff --git a/node_modules/asynckit/parallel.js b/node_modules/asynckit/parallel.js deleted file mode 100644 index 3c50344..0000000 --- a/node_modules/asynckit/parallel.js +++ /dev/null @@ -1,43 +0,0 @@ -var iterate = require('./lib/iterate.js') - , initState = require('./lib/state.js') - , terminator = require('./lib/terminator.js') - ; - -// Public API -module.exports = parallel; - -/** - * Runs iterator over provided array elements in parallel - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function parallel(list, iterator, callback) -{ - var state = initState(list); - - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } - - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; - } - }); - - state.index++; - } - - return terminator.bind(state, callback); -} diff --git a/node_modules/asynckit/serial.js b/node_modules/asynckit/serial.js deleted file mode 100644 index 6cd949a..0000000 --- a/node_modules/asynckit/serial.js +++ /dev/null @@ -1,17 +0,0 @@ -var serialOrdered = require('./serialOrdered.js'); - -// Public API -module.exports = serial; - -/** - * Runs iterator over provided array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); -} diff --git a/node_modules/asynckit/serialOrdered.js b/node_modules/asynckit/serialOrdered.js deleted file mode 100644 index 607eafe..0000000 --- a/node_modules/asynckit/serialOrdered.js +++ /dev/null @@ -1,75 +0,0 @@ -var iterate = require('./lib/iterate.js') - , initState = require('./lib/state.js') - , terminator = require('./lib/terminator.js') - ; - -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; - -/** - * Runs iterator over provided sorted array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); - - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; - } - - 
state.index++; - - // are we there yet? - if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; - } - - // done here - callback(null, state.results); - }); - - return terminator.bind(state, callback); -} - -/* - * -- Sort methods - */ - -/** - * sort helper to sort array elements in ascending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function ascending(a, b) -{ - return a < b ? -1 : a > b ? 1 : 0; -} - -/** - * sort helper to sort array elements in descending order - * - * @param {mixed} a - an item to compare - * @param {mixed} b - an item to compare - * @returns {number} - comparison result - */ -function descending(a, b) -{ - return -1 * ascending(a, b); -} diff --git a/node_modules/asynckit/stream.js b/node_modules/asynckit/stream.js deleted file mode 100644 index d43465f..0000000 --- a/node_modules/asynckit/stream.js +++ /dev/null @@ -1,21 +0,0 @@ -var inherits = require('util').inherits - , Readable = require('stream').Readable - , ReadableAsyncKit = require('./lib/readable_asynckit.js') - , ReadableParallel = require('./lib/readable_parallel.js') - , ReadableSerial = require('./lib/readable_serial.js') - , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js') - ; - -// API -module.exports = -{ - parallel : ReadableParallel, - serial : ReadableSerial, - serialOrdered : ReadableSerialOrdered, -}; - -inherits(ReadableAsyncKit, Readable); - -inherits(ReadableParallel, ReadableAsyncKit); -inherits(ReadableSerial, ReadableAsyncKit); -inherits(ReadableSerialOrdered, ReadableAsyncKit); diff --git a/node_modules/combined-stream/License b/node_modules/combined-stream/License deleted file mode 100644 index 4804b7a..0000000 --- a/node_modules/combined-stream/License +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2011 Debuggable Limited - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/combined-stream/Readme.md b/node_modules/combined-stream/Readme.md deleted file mode 100644 index 9e367b5..0000000 --- a/node_modules/combined-stream/Readme.md +++ /dev/null @@ -1,138 +0,0 @@ -# combined-stream - -A stream that emits multiple other streams one after another. - -**NB** Currently `combined-stream` works with streams version 1 only. There is ongoing effort to switch this library to streams version 2. Any help is welcome. 
:) Meanwhile you can explore other libraries that provide streams2 support with more or less compatibility with `combined-stream`. - -- [combined-stream2](https://www.npmjs.com/package/combined-stream2): A drop-in streams2-compatible replacement for the combined-stream module. - -- [multistream](https://www.npmjs.com/package/multistream): A stream that emits multiple other streams one after another. - -## Installation - -``` bash -npm install combined-stream -``` - -## Usage - -Here is a simple example that shows how you can use combined-stream to combine -two files into one: - -``` javascript -var CombinedStream = require('combined-stream'); -var fs = require('fs'); - -var combinedStream = CombinedStream.create(); -combinedStream.append(fs.createReadStream('file1.txt')); -combinedStream.append(fs.createReadStream('file2.txt')); - -combinedStream.pipe(fs.createWriteStream('combined.txt')); -``` - -While the example above works great, it will pause all source streams until -they are needed. If you don't want that to happen, you can set `pauseStreams` -to `false`: - -``` javascript -var CombinedStream = require('combined-stream'); -var fs = require('fs'); - -var combinedStream = CombinedStream.create({pauseStreams: false}); -combinedStream.append(fs.createReadStream('file1.txt')); -combinedStream.append(fs.createReadStream('file2.txt')); - -combinedStream.pipe(fs.createWriteStream('combined.txt')); -``` - -However, what if you don't have all the source streams yet, or you don't want -to allocate the resources (file descriptors, memory, etc.) for them right away? -Well, in that case you can simply provide a callback that supplies the stream -by calling a `next()` function: - -``` javascript -var CombinedStream = require('combined-stream'); -var fs = require('fs'); - -var combinedStream = CombinedStream.create(); -combinedStream.append(function(next) { - next(fs.createReadStream('file1.txt')); -}); -combinedStream.append(function(next) { - next(fs.createReadStream('file2.txt')); -}); - -combinedStream.pipe(fs.createWriteStream('combined.txt')); -``` - -## API - -### CombinedStream.create([options]) - -Returns a new combined stream object. Available options are: - -* `maxDataSize` -* `pauseStreams` - -The effect of those options is described below. - -### combinedStream.pauseStreams = `true` - -Whether to apply back pressure to the underlaying streams. If set to `false`, -the underlaying streams will never be paused. If set to `true`, the -underlaying streams will be paused right after being appended, as well as when -`delayedStream.pipe()` wants to throttle. - -### combinedStream.maxDataSize = `2 * 1024 * 1024` - -The maximum amount of bytes (or characters) to buffer for all source streams. -If this value is exceeded, `combinedStream` emits an `'error'` event. - -### combinedStream.dataSize = `0` - -The amount of bytes (or characters) currently buffered by `combinedStream`. - -### combinedStream.append(stream) - -Appends the given `stream` to the combinedStream object. If `pauseStreams` is -set to `true, this stream will also be paused right away. - -`streams` can also be a function that takes one parameter called `next`. `next` -is a function that must be invoked in order to provide the `next` stream, see -example above. - -Regardless of how the `stream` is appended, combined-stream always attaches an -`'error'` listener to it, so you don't have to do that manually. - -Special case: `stream` can also be a String or Buffer. 
- -### combinedStream.write(data) - -You should not call this, `combinedStream` takes care of piping the appended -streams into itself for you. - -### combinedStream.resume() - -Causes `combinedStream` to start drain the streams it manages. The function is -idempotent, and also emits a `'resume'` event each time which usually goes to -the stream that is currently being drained. - -### combinedStream.pause(); - -If `combinedStream.pauseStreams` is set to `false`, this does nothing. -Otherwise a `'pause'` event is emitted, this goes to the stream that is -currently being drained, so you can use it to apply back pressure. - -### combinedStream.end(); - -Sets `combinedStream.writable` to false, emits an `'end'` event, and removes -all streams from the queue. - -### combinedStream.destroy(); - -Same as `combinedStream.end()`, except it emits a `'close'` event instead of -`'end'`. - -## License - -combined-stream is licensed under the MIT license. diff --git a/node_modules/combined-stream/lib/combined_stream.js b/node_modules/combined-stream/lib/combined_stream.js deleted file mode 100644 index 125f097..0000000 --- a/node_modules/combined-stream/lib/combined_stream.js +++ /dev/null @@ -1,208 +0,0 @@ -var util = require('util'); -var Stream = require('stream').Stream; -var DelayedStream = require('delayed-stream'); - -module.exports = CombinedStream; -function CombinedStream() { - this.writable = false; - this.readable = true; - this.dataSize = 0; - this.maxDataSize = 2 * 1024 * 1024; - this.pauseStreams = true; - - this._released = false; - this._streams = []; - this._currentStream = null; - this._insideLoop = false; - this._pendingNext = false; -} -util.inherits(CombinedStream, Stream); - -CombinedStream.create = function(options) { - var combinedStream = new this(); - - options = options || {}; - for (var option in options) { - combinedStream[option] = options[option]; - } - - return combinedStream; -}; - -CombinedStream.isStreamLike = function(stream) { - return (typeof stream !== 'function') - && (typeof stream !== 'string') - && (typeof stream !== 'boolean') - && (typeof stream !== 'number') - && (!Buffer.isBuffer(stream)); -}; - -CombinedStream.prototype.append = function(stream) { - var isStreamLike = CombinedStream.isStreamLike(stream); - - if (isStreamLike) { - if (!(stream instanceof DelayedStream)) { - var newStream = DelayedStream.create(stream, { - maxDataSize: Infinity, - pauseStream: this.pauseStreams, - }); - stream.on('data', this._checkDataSize.bind(this)); - stream = newStream; - } - - this._handleErrors(stream); - - if (this.pauseStreams) { - stream.pause(); - } - } - - this._streams.push(stream); - return this; -}; - -CombinedStream.prototype.pipe = function(dest, options) { - Stream.prototype.pipe.call(this, dest, options); - this.resume(); - return dest; -}; - -CombinedStream.prototype._getNext = function() { - this._currentStream = null; - - if (this._insideLoop) { - this._pendingNext = true; - return; // defer call - } - - this._insideLoop = true; - try { - do { - this._pendingNext = false; - this._realGetNext(); - } while (this._pendingNext); - } finally { - this._insideLoop = false; - } -}; - -CombinedStream.prototype._realGetNext = function() { - var stream = this._streams.shift(); - - - if (typeof stream == 'undefined') { - this.end(); - return; - } - - if (typeof stream !== 'function') { - this._pipeNext(stream); - return; - } - - var getStream = stream; - getStream(function(stream) { - var isStreamLike = CombinedStream.isStreamLike(stream); - if (isStreamLike) { - 
stream.on('data', this._checkDataSize.bind(this)); - this._handleErrors(stream); - } - - this._pipeNext(stream); - }.bind(this)); -}; - -CombinedStream.prototype._pipeNext = function(stream) { - this._currentStream = stream; - - var isStreamLike = CombinedStream.isStreamLike(stream); - if (isStreamLike) { - stream.on('end', this._getNext.bind(this)); - stream.pipe(this, {end: false}); - return; - } - - var value = stream; - this.write(value); - this._getNext(); -}; - -CombinedStream.prototype._handleErrors = function(stream) { - var self = this; - stream.on('error', function(err) { - self._emitError(err); - }); -}; - -CombinedStream.prototype.write = function(data) { - this.emit('data', data); -}; - -CombinedStream.prototype.pause = function() { - if (!this.pauseStreams) { - return; - } - - if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); - this.emit('pause'); -}; - -CombinedStream.prototype.resume = function() { - if (!this._released) { - this._released = true; - this.writable = true; - this._getNext(); - } - - if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); - this.emit('resume'); -}; - -CombinedStream.prototype.end = function() { - this._reset(); - this.emit('end'); -}; - -CombinedStream.prototype.destroy = function() { - this._reset(); - this.emit('close'); -}; - -CombinedStream.prototype._reset = function() { - this.writable = false; - this._streams = []; - this._currentStream = null; -}; - -CombinedStream.prototype._checkDataSize = function() { - this._updateDataSize(); - if (this.dataSize <= this.maxDataSize) { - return; - } - - var message = - 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; - this._emitError(new Error(message)); -}; - -CombinedStream.prototype._updateDataSize = function() { - this.dataSize = 0; - - var self = this; - this._streams.forEach(function(stream) { - if (!stream.dataSize) { - return; - } - - self.dataSize += stream.dataSize; - }); - - if (this._currentStream && this._currentStream.dataSize) { - this.dataSize += this._currentStream.dataSize; - } -}; - -CombinedStream.prototype._emitError = function(err) { - this._reset(); - this.emit('error', err); -}; diff --git a/node_modules/combined-stream/package.json b/node_modules/combined-stream/package.json deleted file mode 100644 index 3c0d5c1..0000000 --- a/node_modules/combined-stream/package.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "_from": "combined-stream@^1.0.8", - "_id": "combined-stream@1.0.8", - "_inBundle": false, - "_integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "_location": "/combined-stream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "combined-stream@^1.0.8", - "name": "combined-stream", - "escapedName": "combined-stream", - "rawSpec": "^1.0.8", - "saveSpec": null, - "fetchSpec": "^1.0.8" - }, - "_requiredBy": [ - "/form-data", - "/request", - "/request/form-data" - ], - "_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "_shasum": "c3d45a8b34fd730631a110a8a2520682b31d5a7f", - "_spec": "combined-stream@^1.0.8", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/form-data", - "author": { - "name": "Felix Geisendörfer", - "email": "felix@debuggable.com", - "url": "http://debuggable.com/" - }, - "bugs": { - "url": 
"https://github.com/felixge/node-combined-stream/issues" - }, - "bundleDependencies": false, - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "deprecated": false, - "description": "A stream that emits multiple other streams one after another.", - "devDependencies": { - "far": "~0.0.7" - }, - "engines": { - "node": ">= 0.8" - }, - "homepage": "https://github.com/felixge/node-combined-stream", - "license": "MIT", - "main": "./lib/combined_stream", - "name": "combined-stream", - "repository": { - "type": "git", - "url": "git://github.com/felixge/node-combined-stream.git" - }, - "scripts": { - "test": "node test/run.js" - }, - "version": "1.0.8" -} diff --git a/node_modules/combined-stream/yarn.lock b/node_modules/combined-stream/yarn.lock deleted file mode 100644 index 7edf418..0000000 --- a/node_modules/combined-stream/yarn.lock +++ /dev/null @@ -1,17 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - -far@~0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/far/-/far-0.0.7.tgz#01c1fd362bcd26ce9cf161af3938aa34619f79a7" - dependencies: - oop "0.0.3" - -oop@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/oop/-/oop-0.0.3.tgz#70fa405a5650891a194fdc82ca68dad6dabf4401" diff --git a/node_modules/delayed-stream/.npmignore b/node_modules/delayed-stream/.npmignore deleted file mode 100644 index 9daeafb..0000000 --- a/node_modules/delayed-stream/.npmignore +++ /dev/null @@ -1 +0,0 @@ -test diff --git a/node_modules/delayed-stream/License b/node_modules/delayed-stream/License deleted file mode 100644 index 4804b7a..0000000 --- a/node_modules/delayed-stream/License +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2011 Debuggable Limited - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/delayed-stream/Makefile b/node_modules/delayed-stream/Makefile deleted file mode 100644 index b4ff85a..0000000 --- a/node_modules/delayed-stream/Makefile +++ /dev/null @@ -1,7 +0,0 @@ -SHELL := /bin/bash - -test: - @./test/run.js - -.PHONY: test - diff --git a/node_modules/delayed-stream/Readme.md b/node_modules/delayed-stream/Readme.md deleted file mode 100644 index aca36f9..0000000 --- a/node_modules/delayed-stream/Readme.md +++ /dev/null @@ -1,141 +0,0 @@ -# delayed-stream - -Buffers events from a stream until you are ready to handle them. 
- -## Installation - -``` bash -npm install delayed-stream -``` - -## Usage - -The following example shows how to write a http echo server that delays its -response by 1000 ms. - -``` javascript -var DelayedStream = require('delayed-stream'); -var http = require('http'); - -http.createServer(function(req, res) { - var delayed = DelayedStream.create(req); - - setTimeout(function() { - res.writeHead(200); - delayed.pipe(res); - }, 1000); -}); -``` - -If you are not using `Stream#pipe`, you can also manually release the buffered -events by calling `delayedStream.resume()`: - -``` javascript -var delayed = DelayedStream.create(req); - -setTimeout(function() { - // Emit all buffered events and resume underlaying source - delayed.resume(); -}, 1000); -``` - -## Implementation - -In order to use this meta stream properly, here are a few things you should -know about the implementation. - -### Event Buffering / Proxying - -All events of the `source` stream are hijacked by overwriting the `source.emit` -method. Until node implements a catch-all event listener, this is the only way. - -However, delayed-stream still continues to emit all events it captures on the -`source`, regardless of whether you have released the delayed stream yet or -not. - -Upon creation, delayed-stream captures all `source` events and stores them in -an internal event buffer. Once `delayedStream.release()` is called, all -buffered events are emitted on the `delayedStream`, and the event buffer is -cleared. After that, delayed-stream merely acts as a proxy for the underlaying -source. - -### Error handling - -Error events on `source` are buffered / proxied just like any other events. -However, `delayedStream.create` attaches a no-op `'error'` listener to the -`source`. This way you only have to handle errors on the `delayedStream` -object, rather than in two places. - -### Buffer limits - -delayed-stream provides a `maxDataSize` property that can be used to limit -the amount of data being buffered. In order to protect you from bad `source` -streams that don't react to `source.pause()`, this feature is enabled by -default. - -## API - -### DelayedStream.create(source, [options]) - -Returns a new `delayedStream`. Available options are: - -* `pauseStream` -* `maxDataSize` - -The description for those properties can be found below. - -### delayedStream.source - -The `source` stream managed by this object. This is useful if you are -passing your `delayedStream` around, and you still want to access properties -on the `source` object. - -### delayedStream.pauseStream = true - -Whether to pause the underlaying `source` when calling -`DelayedStream.create()`. Modifying this property afterwards has no effect. - -### delayedStream.maxDataSize = 1024 * 1024 - -The amount of data to buffer before emitting an `error`. - -If the underlaying source is emitting `Buffer` objects, the `maxDataSize` -refers to bytes. - -If the underlaying source is emitting JavaScript strings, the size refers to -characters. - -If you know what you are doing, you can set this property to `Infinity` to -disable this feature. You can also modify this property during runtime. - -### delayedStream.dataSize = 0 - -The amount of data buffered so far. - -### delayedStream.readable - -An ECMA5 getter that returns the value of `source.readable`. - -### delayedStream.resume() - -If the `delayedStream` has not been released so far, `delayedStream.release()` -is called. - -In either case, `source.resume()` is called. - -### delayedStream.pause() - -Calls `source.pause()`. 
- -### delayedStream.pipe(dest) - -Calls `delayedStream.resume()` and then proxies the arguments to `source.pipe`. - -### delayedStream.release() - -Emits and clears all events that have been buffered up so far. This does not -resume the underlaying source, use `delayedStream.resume()` instead. - -## License - -delayed-stream is licensed under the MIT license. diff --git a/node_modules/delayed-stream/lib/delayed_stream.js b/node_modules/delayed-stream/lib/delayed_stream.js deleted file mode 100644 index b38fc85..0000000 --- a/node_modules/delayed-stream/lib/delayed_stream.js +++ /dev/null @@ -1,107 +0,0 @@ -var Stream = require('stream').Stream; -var util = require('util'); - -module.exports = DelayedStream; -function DelayedStream() { - this.source = null; - this.dataSize = 0; - this.maxDataSize = 1024 * 1024; - this.pauseStream = true; - - this._maxDataSizeExceeded = false; - this._released = false; - this._bufferedEvents = []; -} -util.inherits(DelayedStream, Stream); - -DelayedStream.create = function(source, options) { - var delayedStream = new this(); - - options = options || {}; - for (var option in options) { - delayedStream[option] = options[option]; - } - - delayedStream.source = source; - - var realEmit = source.emit; - source.emit = function() { - delayedStream._handleEmit(arguments); - return realEmit.apply(source, arguments); - }; - - source.on('error', function() {}); - if (delayedStream.pauseStream) { - source.pause(); - } - - return delayedStream; -}; - -Object.defineProperty(DelayedStream.prototype, 'readable', { - configurable: true, - enumerable: true, - get: function() { - return this.source.readable; - } -}); - -DelayedStream.prototype.setEncoding = function() { - return this.source.setEncoding.apply(this.source, arguments); -}; - -DelayedStream.prototype.resume = function() { - if (!this._released) { - this.release(); - } - - this.source.resume(); -}; - -DelayedStream.prototype.pause = function() { - this.source.pause(); -}; - -DelayedStream.prototype.release = function() { - this._released = true; - - this._bufferedEvents.forEach(function(args) { - this.emit.apply(this, args); - }.bind(this)); - this._bufferedEvents = []; -}; - -DelayedStream.prototype.pipe = function() { - var r = Stream.prototype.pipe.apply(this, arguments); - this.resume(); - return r; -}; - -DelayedStream.prototype._handleEmit = function(args) { - if (this._released) { - this.emit.apply(this, args); - return; - } - - if (args[0] === 'data') { - this.dataSize += args[1].length; - this._checkIfMaxDataSizeExceeded(); - } - - this._bufferedEvents.push(args); -}; - -DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { - if (this._maxDataSizeExceeded) { - return; - } - - if (this.dataSize <= this.maxDataSize) { - return; - } - - this._maxDataSizeExceeded = true; - var message = - 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' 
- this.emit('error', new Error(message)); -}; diff --git a/node_modules/delayed-stream/package.json b/node_modules/delayed-stream/package.json deleted file mode 100644 index 5d86139..0000000 --- a/node_modules/delayed-stream/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "_from": "delayed-stream@~1.0.0", - "_id": "delayed-stream@1.0.0", - "_inBundle": false, - "_integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "_location": "/delayed-stream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "delayed-stream@~1.0.0", - "name": "delayed-stream", - "escapedName": "delayed-stream", - "rawSpec": "~1.0.0", - "saveSpec": null, - "fetchSpec": "~1.0.0" - }, - "_requiredBy": [ - "/combined-stream" - ], - "_resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "_shasum": "df3ae199acadfb7d440aaae0b29e2272b24ec619", - "_spec": "delayed-stream@~1.0.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/combined-stream", - "author": { - "name": "Felix Geisendörfer", - "email": "felix@debuggable.com", - "url": "http://debuggable.com/" - }, - "bugs": { - "url": "https://github.com/felixge/node-delayed-stream/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Mike Atkins", - "email": "apeherder@gmail.com" - } - ], - "dependencies": {}, - "deprecated": false, - "description": "Buffers events from a stream until you are ready to handle them.", - "devDependencies": { - "fake": "0.2.0", - "far": "0.0.1" - }, - "engines": { - "node": ">=0.4.0" - }, - "homepage": "https://github.com/felixge/node-delayed-stream", - "license": "MIT", - "main": "./lib/delayed_stream", - "name": "delayed-stream", - "repository": { - "type": "git", - "url": "git://github.com/felixge/node-delayed-stream.git" - }, - "scripts": { - "test": "make test" - }, - "version": "1.0.0" -} diff --git a/node_modules/duplexify/.travis.yml b/node_modules/duplexify/.travis.yml deleted file mode 100644 index cb6e182..0000000 --- a/node_modules/duplexify/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: node_js -node_js: - - "4" - - "6" - - "8" - - "10" diff --git a/node_modules/duplexify/LICENSE b/node_modules/duplexify/LICENSE deleted file mode 100644 index 757562e..0000000 --- a/node_modules/duplexify/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Mathias Buus - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/duplexify/README.md b/node_modules/duplexify/README.md deleted file mode 100644 index 8352900..0000000 --- a/node_modules/duplexify/README.md +++ /dev/null @@ -1,97 +0,0 @@ -# duplexify - -Turn a writeable and readable stream into a single streams2 duplex stream. - -Similar to [duplexer2](https://github.com/deoxxa/duplexer2) except it supports both streams2 and streams1 as input -and it allows you to set the readable and writable part asynchronously using `setReadable(stream)` and `setWritable(stream)` - -``` -npm install duplexify -``` - -[![build status](http://img.shields.io/travis/mafintosh/duplexify.svg?style=flat)](http://travis-ci.org/mafintosh/duplexify) - -## Usage - -Use `duplexify(writable, readable, streamOptions)` (or `duplexify.obj(writable, readable)` to create an object stream) - -``` js -var duplexify = require('duplexify') - -// turn writableStream and readableStream into a single duplex stream -var dup = duplexify(writableStream, readableStream) - -dup.write('hello world') // will write to writableStream -dup.on('data', function(data) { - // will read from readableStream -}) -``` - -You can also set the readable and writable parts asynchronously - -``` js -var dup = duplexify() - -dup.write('hello world') // write will buffer until the writable - // part has been set - -// wait a bit ... -dup.setReadable(readableStream) - -// maybe wait some more? -dup.setWritable(writableStream) -``` - -If you call `setReadable` or `setWritable` multiple times it will unregister the previous readable/writable stream. -To disable the readable or writable part call `setReadable` or `setWritable` with `null`. - -If the readable or writable streams emits an error or close it will destroy both streams and bubble up the event. -You can also explicitly destroy the streams by calling `dup.destroy()`. The `destroy` method optionally takes an -error object as argument, in which case the error is emitted as part of the `error` event. - -``` js -dup.on('error', function(err) { - console.log('readable or writable emitted an error - close will follow') -}) - -dup.on('close', function() { - console.log('the duplex stream is destroyed') -}) - -dup.destroy() // calls destroy on the readable and writable part (if present) -``` - -## HTTP request example - -Turn a node core http request into a duplex stream is as easy as - -``` js -var duplexify = require('duplexify') -var http = require('http') - -var request = function(opts) { - var req = http.request(opts) - var dup = duplexify(req) - req.on('response', function(res) { - dup.setReadable(res) - }) - return dup -} - -var req = request({ - method: 'GET', - host: 'www.google.com', - port: 80 -}) - -req.end() -req.pipe(process.stdout) -``` - -## License - -MIT - -## Related - -`duplexify` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. 
diff --git a/node_modules/duplexify/example.js b/node_modules/duplexify/example.js deleted file mode 100644 index 5585c19..0000000 --- a/node_modules/duplexify/example.js +++ /dev/null @@ -1,21 +0,0 @@ -var duplexify = require('duplexify') -var http = require('http') - -var request = function(opts) { - var req = http.request(opts) - var dup = duplexify() - dup.setWritable(req) - req.on('response', function(res) { - dup.setReadable(res) - }) - return dup -} - -var req = request({ - method: 'GET', - host: 'www.google.com', - port: 80 -}) - -req.end() -req.pipe(process.stdout) diff --git a/node_modules/duplexify/index.js b/node_modules/duplexify/index.js deleted file mode 100644 index b0e8a3e..0000000 --- a/node_modules/duplexify/index.js +++ /dev/null @@ -1,238 +0,0 @@ -var stream = require('readable-stream') -var eos = require('end-of-stream') -var inherits = require('inherits') -var shift = require('stream-shift') - -var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) - ? Buffer.from([0]) - : new Buffer([0]) - -var onuncork = function(self, fn) { - if (self._corked) self.once('uncork', fn) - else fn() -} - -var autoDestroy = function (self, err) { - if (self._autoDestroy) self.destroy(err) -} - -var destroyer = function(self, end) { - return function(err) { - if (err) autoDestroy(self, err.message === 'premature close' ? null : err) - else if (end && !self._ended) self.end() - } -} - -var end = function(ws, fn) { - if (!ws) return fn() - if (ws._writableState && ws._writableState.finished) return fn() - if (ws._writableState) return ws.end(fn) - ws.end() - fn() -} - -var noop = function() {} - -var toStreams2 = function(rs) { - return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) -} - -var Duplexify = function(writable, readable, opts) { - if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) - stream.Duplex.call(this, opts) - - this._writable = null - this._readable = null - this._readable2 = null - - this._autoDestroy = !opts || opts.autoDestroy !== false - this._forwardDestroy = !opts || opts.destroy !== false - this._forwardEnd = !opts || opts.end !== false - this._corked = 1 // start corked - this._ondrain = null - this._drained = false - this._forwarding = false - this._unwrite = null - this._unread = null - this._ended = false - - this.destroyed = false - - if (writable) this.setWritable(writable) - if (readable) this.setReadable(readable) -} - -inherits(Duplexify, stream.Duplex) - -Duplexify.obj = function(writable, readable, opts) { - if (!opts) opts = {} - opts.objectMode = true - opts.highWaterMark = 16 - return new Duplexify(writable, readable, opts) -} - -Duplexify.prototype.cork = function() { - if (++this._corked === 1) this.emit('cork') -} - -Duplexify.prototype.uncork = function() { - if (this._corked && --this._corked === 0) this.emit('uncork') -} - -Duplexify.prototype.setWritable = function(writable) { - if (this._unwrite) this._unwrite() - - if (this.destroyed) { - if (writable && writable.destroy) writable.destroy() - return - } - - if (writable === null || writable === false) { - this.end() - return - } - - var self = this - var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) - - var ondrain = function() { - var ondrain = self._ondrain - self._ondrain = null - if (ondrain) ondrain() - } - - var clear = function() { - self._writable.removeListener('drain', ondrain) - unend() - } - - if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to 
avoid livelocks - - this._writable = writable - this._writable.on('drain', ondrain) - this._unwrite = clear - - this.uncork() // always uncork setWritable -} - -Duplexify.prototype.setReadable = function(readable) { - if (this._unread) this._unread() - - if (this.destroyed) { - if (readable && readable.destroy) readable.destroy() - return - } - - if (readable === null || readable === false) { - this.push(null) - this.resume() - return - } - - var self = this - var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) - - var onreadable = function() { - self._forward() - } - - var onend = function() { - self.push(null) - } - - var clear = function() { - self._readable2.removeListener('readable', onreadable) - self._readable2.removeListener('end', onend) - unend() - } - - this._drained = true - this._readable = readable - this._readable2 = readable._readableState ? readable : toStreams2(readable) - this._readable2.on('readable', onreadable) - this._readable2.on('end', onend) - this._unread = clear - - this._forward() -} - -Duplexify.prototype._read = function() { - this._drained = true - this._forward() -} - -Duplexify.prototype._forward = function() { - if (this._forwarding || !this._readable2 || !this._drained) return - this._forwarding = true - - var data - - while (this._drained && (data = shift(this._readable2)) !== null) { - if (this.destroyed) continue - this._drained = this.push(data) - } - - this._forwarding = false -} - -Duplexify.prototype.destroy = function(err, cb) { - if (!cb) cb = noop - if (this.destroyed) return cb(null) - this.destroyed = true - - var self = this - process.nextTick(function() { - self._destroy(err) - cb(null) - }) -} - -Duplexify.prototype._destroy = function(err) { - if (err) { - var ondrain = this._ondrain - this._ondrain = null - if (ondrain) ondrain(err) - else this.emit('error', err) - } - - if (this._forwardDestroy) { - if (this._readable && this._readable.destroy) this._readable.destroy() - if (this._writable && this._writable.destroy) this._writable.destroy() - } - - this.emit('close') -} - -Duplexify.prototype._write = function(data, enc, cb) { - if (this.destroyed) return - if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) - if (data === SIGNAL_FLUSH) return this._finish(cb) - if (!this._writable) return cb() - - if (this._writable.write(data) === false) this._ondrain = cb - else if (!this.destroyed) cb() -} - -Duplexify.prototype._finish = function(cb) { - var self = this - this.emit('preend') - onuncork(this, function() { - end(self._forwardEnd && self._writable, function() { - // haxx to not emit prefinish twice - if (self._writableState.prefinished === false) self._writableState.prefinished = true - self.emit('prefinish') - onuncork(self, cb) - }) - }) -} - -Duplexify.prototype.end = function(data, enc, cb) { - if (typeof data === 'function') return this.end(null, null, data) - if (typeof enc === 'function') return this.end(data, null, enc) - this._ended = true - if (data) this.write(data) - if (!this._writableState.ending) this.write(SIGNAL_FLUSH) - return stream.Writable.prototype.end.call(this, cb) -} - -module.exports = Duplexify diff --git a/node_modules/duplexify/package.json b/node_modules/duplexify/package.json deleted file mode 100644 index fa43d8d..0000000 --- a/node_modules/duplexify/package.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "_from": "duplexify@^4.1.1", - "_id": "duplexify@4.1.1", - "_inBundle": false, - "_integrity": 
"sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", - "_location": "/duplexify", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "duplexify@^4.1.1", - "name": "duplexify", - "escapedName": "duplexify", - "rawSpec": "^4.1.1", - "saveSpec": null, - "fetchSpec": "^4.1.1" - }, - "_requiredBy": [ - "/pumpify" - ], - "_resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", - "_shasum": "7027dc374f157b122a8ae08c2d3ea4d2d953aa61", - "_spec": "duplexify@^4.1.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/pumpify", - "author": { - "name": "Mathias Buus" - }, - "bugs": { - "url": "https://github.com/mafintosh/duplexify/issues" - }, - "bundleDependencies": false, - "dependencies": { - "end-of-stream": "^1.4.1", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1", - "stream-shift": "^1.0.0" - }, - "deprecated": false, - "description": "Turn a writable and readable stream into a streams2 duplex stream with support for async initialization and streams1/streams2 input", - "devDependencies": { - "concat-stream": "^1.5.2", - "tape": "^4.0.0", - "through2": "^2.0.0" - }, - "homepage": "https://github.com/mafintosh/duplexify", - "keywords": [ - "duplex", - "streams2", - "streams", - "stream", - "writable", - "readable", - "async" - ], - "license": "MIT", - "main": "index.js", - "name": "duplexify", - "repository": { - "type": "git", - "url": "git://github.com/mafintosh/duplexify.git" - }, - "scripts": { - "test": "tape test.js" - }, - "version": "4.1.1" -} diff --git a/node_modules/duplexify/test.js b/node_modules/duplexify/test.js deleted file mode 100644 index a01c126..0000000 --- a/node_modules/duplexify/test.js +++ /dev/null @@ -1,338 +0,0 @@ -var tape = require('tape') -var through = require('through2') -var concat = require('concat-stream') -var stream = require('readable-stream') -var net = require('net') -var duplexify = require('./') - -var HELLO_WORLD = (Buffer.from && Buffer.from !== Uint8Array.from) - ? 
Buffer.from('hello world') - : new Buffer('hello world') - -tape('passthrough', function(t) { - t.plan(2) - - var pt = through() - var dup = duplexify(pt, pt) - - dup.end('hello world') - dup.on('finish', function() { - t.ok(true, 'should finish') - }) - dup.pipe(concat(function(data) { - t.same(data.toString(), 'hello world', 'same in as out') - })) -}) - -tape('passthrough + double end', function(t) { - t.plan(2) - - var pt = through() - var dup = duplexify(pt, pt) - - dup.end('hello world') - dup.end() - - dup.on('finish', function() { - t.ok(true, 'should finish') - }) - dup.pipe(concat(function(data) { - t.same(data.toString(), 'hello world', 'same in as out') - })) -}) - -tape('async passthrough + end', function(t) { - t.plan(2) - - var pt = through.obj({highWaterMark:1}, function(data, enc, cb) { - setTimeout(function() { - cb(null, data) - }, 100) - }) - - var dup = duplexify(pt, pt) - - dup.write('hello ') - dup.write('world') - dup.end() - - dup.on('finish', function() { - t.ok(true, 'should finish') - }) - dup.pipe(concat(function(data) { - t.same(data.toString(), 'hello world', 'same in as out') - })) -}) - -tape('duplex', function(t) { - var readExpected = ['read-a', 'read-b', 'read-c'] - var writeExpected = ['write-a', 'write-b', 'write-c'] - - t.plan(readExpected.length+writeExpected.length+2) - - var readable = through.obj() - var writable = through.obj(function(data, enc, cb) { - t.same(data, writeExpected.shift(), 'onwrite should match') - cb() - }) - - var dup = duplexify.obj(writable, readable) - - readExpected.slice().forEach(function(data) { - readable.write(data) - }) - readable.end() - - writeExpected.slice().forEach(function(data) { - dup.write(data) - }) - dup.end() - - dup.on('data', function(data) { - t.same(data, readExpected.shift(), 'ondata should match') - }) - dup.on('end', function() { - t.ok(true, 'should end') - }) - dup.on('finish', function() { - t.ok(true, 'should finish') - }) -}) - -tape('async', function(t) { - var dup = duplexify() - var pt = through() - - dup.pipe(concat(function(data) { - t.same(data.toString(), 'i was async', 'same in as out') - t.end() - })) - - dup.write('i') - dup.write(' was ') - dup.end('async') - - setTimeout(function() { - dup.setWritable(pt) - setTimeout(function() { - dup.setReadable(pt) - }, 50) - }, 50) -}) - -tape('destroy', function(t) { - t.plan(2) - - var write = through() - var read = through() - var dup = duplexify(write, read) - - write.destroy = function() { - t.ok(true, 'write destroyed') - } - - dup.on('close', function() { - t.ok(true, 'close emitted') - }) - - dup.destroy() - dup.destroy() // should only work once -}) - -tape('destroy both', function(t) { - t.plan(3) - - var write = through() - var read = through() - var dup = duplexify(write, read) - - write.destroy = function() { - t.ok(true, 'write destroyed') - } - - read.destroy = function() { - t.ok(true, 'read destroyed') - } - - dup.on('close', function() { - t.ok(true, 'close emitted') - }) - - dup.destroy() - dup.destroy() // should only work once -}) - -tape('bubble read errors', function(t) { - t.plan(2) - - var write = through() - var read = through() - var dup = duplexify(write, read) - - dup.on('error', function(err) { - t.same(err.message, 'read-error', 'received read error') - }) - dup.on('close', function() { - t.ok(true, 'close emitted') - }) - - read.emit('error', new Error('read-error')) - write.emit('error', new Error('write-error')) // only emit first error -}) - -tape('bubble write errors', function(t) { - t.plan(2) - - var write = 
through() - var read = through() - var dup = duplexify(write, read) - - dup.on('error', function(err) { - t.same(err.message, 'write-error', 'received write error') - }) - dup.on('close', function() { - t.ok(true, 'close emitted') - }) - - write.emit('error', new Error('write-error')) - read.emit('error', new Error('read-error')) // only emit first error -}) - -tape('bubble errors from write()', function(t) { - t.plan(3) - - var errored = false - var dup = duplexify(new stream.Writable({ - write: function(chunk, enc, next) { - next(new Error('write-error')) - } - })) - - dup.on('error', function(err) { - errored = true - t.same(err.message, 'write-error', 'received write error') - }) - dup.on('close', function() { - t.pass('close emitted') - t.ok(errored, 'error was emitted before close') - }) - dup.end('123') -}) - -tape('destroy while waiting for drain', function(t) { - t.plan(3) - - var errored = false - var dup = duplexify(new stream.Writable({ - highWaterMark: 0, - write: function() {} - })) - - dup.on('error', function(err) { - errored = true - t.same(err.message, 'destroy-error', 'received destroy error') - }) - dup.on('close', function() { - t.pass('close emitted') - t.ok(errored, 'error was emitted before close') - }) - dup.write('123') - dup.destroy(new Error('destroy-error')) -}) - -tape('reset writable / readable', function(t) { - t.plan(3) - - var toUpperCase = function(data, enc, cb) { - cb(null, data.toString().toUpperCase()) - } - - var passthrough = through() - var upper = through(toUpperCase) - var dup = duplexify(passthrough, passthrough) - - dup.once('data', function(data) { - t.same(data.toString(), 'hello') - dup.setWritable(upper) - dup.setReadable(upper) - dup.once('data', function(data) { - t.same(data.toString(), 'HELLO') - dup.once('data', function(data) { - t.same(data.toString(), 'HI') - t.end() - }) - }) - dup.write('hello') - dup.write('hi') - }) - dup.write('hello') -}) - -tape('cork', function(t) { - var passthrough = through() - var dup = duplexify(passthrough, passthrough) - var ok = false - - dup.on('prefinish', function() { - dup.cork() - setTimeout(function() { - ok = true - dup.uncork() - }, 100) - }) - dup.on('finish', function() { - t.ok(ok) - t.end() - }) - dup.end() -}) - -tape('prefinish not twice', function(t) { - var passthrough = through() - var dup = duplexify(passthrough, passthrough) - var prefinished = false - - dup.on('prefinish', function() { - t.ok(!prefinished, 'only prefinish once') - prefinished = true - }) - - dup.on('finish', function() { - t.end() - }) - - dup.end() -}) - -tape('close', function(t) { - var passthrough = through() - var dup = duplexify(passthrough, passthrough) - - passthrough.emit('close') - dup.on('close', function() { - t.ok(true, 'should forward close') - t.end() - }) -}) - -tape('works with node native streams (net)', function(t) { - t.plan(1) - - var server = net.createServer(function(socket) { - var dup = duplexify(socket, socket) - - dup.once('data', function(chunk) { - t.same(chunk, HELLO_WORLD) - server.close() - socket.end() - t.end() - }) - }) - - server.listen(0, function () { - var socket = net.connect(server.address().port) - var dup = duplexify(socket, socket) - - dup.write(HELLO_WORLD) - }) -}) diff --git a/node_modules/end-of-stream/LICENSE b/node_modules/end-of-stream/LICENSE deleted file mode 100644 index 757562e..0000000 --- a/node_modules/end-of-stream/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Mathias Buus - -Permission is hereby granted, free of 
charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/end-of-stream/README.md b/node_modules/end-of-stream/README.md deleted file mode 100644 index 857b14b..0000000 --- a/node_modules/end-of-stream/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# end-of-stream - -A node module that calls a callback when a readable/writable/duplex stream has completed or failed. - - npm install end-of-stream - -[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream) - -## Usage - -Simply pass a stream and a callback to the `eos`. -Both legacy streams, streams2 and stream3 are supported. - -``` js -var eos = require('end-of-stream'); - -eos(readableStream, function(err) { - // this will be set to the stream instance - if (err) return console.log('stream had an error or closed early'); - console.log('stream has ended', this === readableStream); -}); - -eos(writableStream, function(err) { - if (err) return console.log('stream had an error or closed early'); - console.log('stream has finished', this === writableStream); -}); - -eos(duplexStream, function(err) { - if (err) return console.log('stream had an error or closed early'); - console.log('stream has ended and finished', this === duplexStream); -}); - -eos(duplexStream, {readable:false}, function(err) { - if (err) return console.log('stream had an error or closed early'); - console.log('stream has finished but might still be readable'); -}); - -eos(duplexStream, {writable:false}, function(err) { - if (err) return console.log('stream had an error or closed early'); - console.log('stream has ended but might still be writable'); -}); - -eos(readableStream, {error:false}, function(err) { - // do not treat emit('error', err) as a end-of-stream -}); -``` - -## License - -MIT - -## Related - -`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. 
diff --git a/node_modules/end-of-stream/index.js b/node_modules/end-of-stream/index.js deleted file mode 100644 index c77f0d5..0000000 --- a/node_modules/end-of-stream/index.js +++ /dev/null @@ -1,94 +0,0 @@ -var once = require('once'); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; - -module.exports = eos; diff --git a/node_modules/end-of-stream/package.json b/node_modules/end-of-stream/package.json deleted file mode 100644 index d81a5f4..0000000 --- a/node_modules/end-of-stream/package.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "_from": "end-of-stream@^1.1.0", - "_id": "end-of-stream@1.4.4", - "_inBundle": false, - "_integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "_location": "/end-of-stream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "end-of-stream@^1.1.0", - "name": "end-of-stream", - 
"escapedName": "end-of-stream", - "rawSpec": "^1.1.0", - "saveSpec": null, - "fetchSpec": "^1.1.0" - }, - "_requiredBy": [ - "/duplexify", - "/pump" - ], - "_resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "_shasum": "5ae64a5f45057baf3626ec14da0ca5e4b2431eb0", - "_spec": "end-of-stream@^1.1.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/pump", - "author": { - "name": "Mathias Buus", - "email": "mathiasbuus@gmail.com" - }, - "bugs": { - "url": "https://github.com/mafintosh/end-of-stream/issues" - }, - "bundleDependencies": false, - "dependencies": { - "once": "^1.4.0" - }, - "deprecated": false, - "description": "Call a callback when a readable/writable/duplex stream has completed or failed.", - "devDependencies": { - "tape": "^4.11.0" - }, - "files": [ - "index.js" - ], - "homepage": "https://github.com/mafintosh/end-of-stream", - "keywords": [ - "stream", - "streams", - "callback", - "finish", - "close", - "end", - "wait" - ], - "license": "MIT", - "main": "index.js", - "name": "end-of-stream", - "repository": { - "type": "git", - "url": "git://github.com/mafintosh/end-of-stream.git" - }, - "scripts": { - "test": "node test.js" - }, - "version": "1.4.4" -} diff --git a/node_modules/fast-fifo/LICENSE b/node_modules/fast-fifo/LICENSE deleted file mode 100644 index c728dc7..0000000 --- a/node_modules/fast-fifo/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Mathias Buus - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/fast-fifo/README.md b/node_modules/fast-fifo/README.md deleted file mode 100644 index a0c5ebe..0000000 --- a/node_modules/fast-fifo/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# fast-fifo - -A fast fifo implementation similar to the one powering nextTick in Node.js core - -``` -npm install fast-fifo -``` - -Uses a linked list of growing fixed sized arrays to implement the FIFO to avoid -allocating a wrapper object for each item. - -## Usage - -``` js -const FIFO = require('fast-fifo') - -const q = new FIFO() - -q.push('hello') -q.push('world') - -q.shift() // returns hello -q.shift() // returns world -``` - -## API - -#### `q = new FIFO()` - -Create a new FIFO. - -#### `q.push(value)` - -Push a value to the FIFO. `value` can be anything other than undefined. - -#### `value = q.shift()` - -Return the oldest value from the FIFO. - -#### `bool = q.isEmpty()` - -Returns `true` if the FIFO is empty and false otherwise. 
- -## Benchmarks - -Included in bench.js is a simple benchmark that benchmarks this against a simple -linked list based FIFO. - -On my machine the benchmark looks like this: - -``` -fifo bulk push and shift: 2881.508ms -fifo individual push and shift: 3248.437ms -fast-fifo bulk push and shift: 1606.972ms -fast-fifo individual push and shift: 1328.064ms -fifo bulk push and shift: 3266.902ms -fifo individual push and shift: 3320.944ms -fast-fifo bulk push and shift: 1858.307ms -fast-fifo individual push and shift: 1516.983ms -``` - -YMMV - -## License - -MIT diff --git a/node_modules/fast-fifo/bench.js b/node_modules/fast-fifo/bench.js deleted file mode 100644 index 28a9c11..0000000 --- a/node_modules/fast-fifo/bench.js +++ /dev/null @@ -1,34 +0,0 @@ -const FastFIFO = require('./') -const FIFO = require('fifo') - -run(new FIFO(), 'fifo') -run(new FastFIFO(), 'fast-fifo') -run(new FIFO(), 'fifo') -run(new FastFIFO(), 'fast-fifo') - -function run (q, prefix) { - const runs = 1024 - - console.time(prefix + ' bulk push and shift') - - for (let j = 0; j < 1e5; j++) { - for (let i = 0; i < runs; i++) { - q.push(i) - } - for (let i = 0; i < runs; i++) { - q.shift() - } - } - - console.timeEnd(prefix + ' bulk push and shift') - console.time(prefix + ' individual push and shift') - - for (let j = 0; j < 1e5; j++) { - for (let i = 0; i < runs; i++) { - q.push(i) - q.shift() - } - } - - console.timeEnd(prefix + ' individual push and shift') -} diff --git a/node_modules/fast-fifo/fixed-size.js b/node_modules/fast-fifo/fixed-size.js deleted file mode 100644 index c406704..0000000 --- a/node_modules/fast-fifo/fixed-size.js +++ /dev/null @@ -1,29 +0,0 @@ -module.exports = class FixedFIFO { - constructor (hwm) { - if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) throw new Error('Max size for a FixedFIFO should be a power of two') - this.buffer = new Array(hwm) - this.mask = hwm - 1 - this.top = 0 - this.btm = 0 - this.next = null - } - - push (data) { - if (this.buffer[this.top] !== undefined) return false - this.buffer[this.top] = data - this.top = (this.top + 1) & this.mask - return true - } - - shift () { - const last = this.buffer[this.btm] - if (last === undefined) return undefined - this.buffer[this.btm] = undefined - this.btm = (this.btm + 1) & this.mask - return last - } - - isEmpty () { - return this.buffer[this.btm] === undefined - } -} diff --git a/node_modules/fast-fifo/index.js b/node_modules/fast-fifo/index.js deleted file mode 100644 index f65b0d1..0000000 --- a/node_modules/fast-fifo/index.js +++ /dev/null @@ -1,32 +0,0 @@ -const FixedFIFO = require('./fixed-size') - -module.exports = class FastFIFO { - constructor (hwm) { - this.hwm = hwm || 16 - this.head = new FixedFIFO(this.hwm) - this.tail = this.head - } - - push (val) { - if (!this.head.push(val)) { - const prev = this.head - this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length) - this.head.push(val) - } - } - - shift () { - const val = this.tail.shift() - if (val === undefined && this.tail.next) { - const next = this.tail.next - this.tail.next = null - this.tail = next - return this.tail.shift() - } - return val - } - - isEmpty () { - return this.head.isEmpty() - } -} diff --git a/node_modules/fast-fifo/package.json b/node_modules/fast-fifo/package.json deleted file mode 100644 index 1cc2ab9..0000000 --- a/node_modules/fast-fifo/package.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "_from": "fast-fifo@^1.0.0", - "_id": "fast-fifo@1.0.0", - "_inBundle": false, - "_integrity": 
"sha512-4VEXmjxLj7sbs8J//cn2qhRap50dGzF5n8fjay8mau+Jn4hxSeR3xPFwxMaQq/pDaq7+KQk0PAbC2+nWDkJrmQ==", - "_location": "/fast-fifo", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "fast-fifo@^1.0.0", - "name": "fast-fifo", - "escapedName": "fast-fifo", - "rawSpec": "^1.0.0", - "saveSpec": null, - "fetchSpec": "^1.0.0" - }, - "_requiredBy": [ - "/streamx" - ], - "_resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.0.0.tgz", - "_shasum": "9bc72e6860347bb045a876d1c5c0af11e9b984e7", - "_spec": "fast-fifo@^1.0.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/streamx", - "author": { - "name": "Mathias Buus", - "url": "@mafintosh" - }, - "bugs": { - "url": "https://github.com/mafintosh/fast-fifo/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "A fast fifo implementation similar to the one powering nextTick in Node.js core", - "devDependencies": { - "standard": "^12.0.1", - "tape": "^4.10.1" - }, - "homepage": "https://github.com/mafintosh/fast-fifo", - "license": "MIT", - "main": "index.js", - "name": "fast-fifo", - "repository": { - "type": "git", - "url": "git+https://github.com/mafintosh/fast-fifo.git" - }, - "scripts": { - "test": "standard && tape test.js" - }, - "version": "1.0.0" -} diff --git a/node_modules/fast-fifo/test.js b/node_modules/fast-fifo/test.js deleted file mode 100644 index 0946c3e..0000000 --- a/node_modules/fast-fifo/test.js +++ /dev/null @@ -1,38 +0,0 @@ -const tape = require('tape') -const FIFO = require('./') - -tape('basic', function (t) { - const q = new FIFO() - const values = [ - 1, - 4, - 4, - 0, - null, - {}, - Math.random(), - '', - 'hello', - 9, - 1, - 4, - 5, - 6, - 7, - null, - null, - 0, - 0, - 15, - 52.2, - null - ] - - t.same(q.shift(), undefined) - t.ok(q.isEmpty()) - for (const value of values) q.push(value) - while (!q.isEmpty()) t.same(q.shift(), values.shift()) - t.same(q.shift(), undefined) - t.ok(q.isEmpty()) - t.end() -}) diff --git a/node_modules/fetch-errors/.github/workflows/tests.yml b/node_modules/fetch-errors/.github/workflows/tests.yml deleted file mode 100644 index 0dd1b93..0000000 --- a/node_modules/fetch-errors/.github/workflows/tests.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: tests - -on: [push] - -jobs: - build: - - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: [12.x] - - steps: - - uses: actions/checkout@v1 - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: npm install, build, and test - run: | - npm i - npm test - env: - CI: true diff --git a/node_modules/fetch-errors/CHANGELOG.md b/node_modules/fetch-errors/CHANGELOG.md deleted file mode 100644 index ebd1941..0000000 --- a/node_modules/fetch-errors/CHANGELOG.md +++ /dev/null @@ -1,15 +0,0 @@ -# fetch-errors Change Log -All notable changes to this project will be documented in this file. -This project adheres to [Semantic Versioning](http://semver.org/). - -## 2.0.0 - 2020-02-10 - -* Remove esm, export CJS. ESM doesn't provide any advantage in Node.js modueles. - -## 1.0.1 - 2019-11-19 - -* Add missing devdep. 
- -## 1.0.0 - 2019-11-19 - -* Initial commit diff --git a/node_modules/fetch-errors/CODE_OF_CONDUCT.md b/node_modules/fetch-errors/CODE_OF_CONDUCT.md deleted file mode 100644 index 4caa43a..0000000 --- a/node_modules/fetch-errors/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,4 +0,0 @@ -# Code of conduct - -- This repo is governed as a dictatorship starting with the originator of the project. -- No malevolence tolerated whatsoever. diff --git a/node_modules/fetch-errors/CONTRIBUTING.md b/node_modules/fetch-errors/CONTRIBUTING.md deleted file mode 100644 index afac12a..0000000 --- a/node_modules/fetch-errors/CONTRIBUTING.md +++ /dev/null @@ -1,11 +0,0 @@ -# Contributing - -- Contributors reserve the right to walk away from this project at any moment with or without notice. -- Questions are welcome, however unless there is a official support contract established between the maintainers and the requester, support is not guaranteed. -- Patches, ideas and changes welcome. -- Fixes almost always welcome. -- Features sometimes welcome. Please open an issue to discuss the issue prior to spending lots of time on the problem. It may be rejected. If you don't want to wait around for the discussion to commence, and you really want to jump into the implementation work, be prepared for fork if the idea is respectfully declined. -- Try to stay within the style of the existing code. -- All tests must pass. -- Additional features or code paths must be tested. -- Aim for 100% coverage. diff --git a/node_modules/fetch-errors/LICENSE b/node_modules/fetch-errors/LICENSE deleted file mode 100644 index 7524c61..0000000 --- a/node_modules/fetch-errors/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Bret Comnes - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/fetch-errors/README.md b/node_modules/fetch-errors/README.md deleted file mode 100644 index 506a823..0000000 --- a/node_modules/fetch-errors/README.md +++ /dev/null @@ -1,100 +0,0 @@ -# fetch-errors -[![Actions Status](https://github.com/bcomnes/fetch-errors/workflows/tests/badge.svg)](https://github.com/bcomnes/fetch-errors/actions) - -Error subclasses for Text and JSON `fetch` response bodies, and a `handleResponse` fuction to handle fetch responses cleanly. 
- -``` -npm install fetch-errors -``` - -## Usage - -``` js -const { - HTTPError, - TextHTTPError, - JSONHTTPError, - handleResponse -} = require('fetch-errors'); - -window.fetch('https://api.github.com/users/bcomnes/repos') - .then(handleResponse) - .then(json => { console.log(json); }) - .catch(err => { - switch (err.constructor) { - case JSONHTTPError: { - console.error(err.message); - console.error(err.status); - console.error(err.json); - console.error(err.stack); - break; - } - case TextHTTPError: { - console.error(err.message); - console.error(err.status); - console.error(err.data); - console.error(err.stack); - break; - } - case HTTPError: { - console.error(err.message); - console.error(err.status); - console.error(err.stack); - break; - } - default: { - console.error(err); - break; - } - } - }); -``` - -## API - -### `async handleResponse(response)` - -Parse JSON or text bodies of [`fetch`][fetch] [`Response`][response] objects. Intended for APIs that return JSON, but falls back to `response.text()` body reading when the `json` `Content-type` is missing. If `response.ok`, returns a JS object (if JSON), or the text content of the response body. Otherwise, returns a `JSONHTTPError` or `TextHTTPError`. If if `response.json()` or `resonse.text()` will throw their [native error]() objects. - -### `class HTTPError extends Error` - -Additional error properties from Error - -```js -{ - stack, // stack trace of error - status // status code of response -} -``` - -### `class TextHTTPError extends HTTPError` - -Additional error properties from HTTPError - -```js -{ - data // data of text response -} -``` - -### `class JSONHTTPError extends HTTPError` - -Additional error properties from HTTPError - -```js -{ - json // json of a JSON response -} -``` - -### See also - -- [netlify/micro-api-client](https://github.com/netlify/micro-api-client): These errors were extracted from netlify/micro-api-client for individual use. - - -## License - -MIT - -[response]: https://developer.mozilla.org/en-US/docs/Web/API/Response -[fetch]: https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API diff --git a/node_modules/fetch-errors/index.js b/node_modules/fetch-errors/index.js deleted file mode 100644 index 883f14c..0000000 --- a/node_modules/fetch-errors/index.js +++ /dev/null @@ -1,45 +0,0 @@ -async function handleResponse (response) { - const contentType = response.headers.get('Content-Type'); - const isJSON = contentType && contentType.match(/json/); - const data = isJSON ? await response.json() : await response.text(); - if (response.ok) return data; - else { - return isJSON - ? 
Promise.reject(new JSONHTTPError(response, data)) - : Promise.reject(new TextHTTPError(response, data)); - } -} - -class HTTPError extends Error { - constructor (response) { - super(response.statusText); - this.name = this.constructor.name; - if (typeof Error.captureStackTrace === 'function') { - Error.captureStackTrace(this, this.constructor); - } else { - this.stack = new Error(response.statusText).stack; - } - this.status = response.status; - } -} - -class TextHTTPError extends HTTPError { - constructor (response, data) { - super(response); - this.data = data; - } -} - -class JSONHTTPError extends HTTPError { - constructor (response, json) { - super(response); - this.json = json; - } -} - -module.exports = { - handleResponse, - HTTPError, - TextHTTPError, - JSONHTTPError -}; diff --git a/node_modules/fetch-errors/package.json b/node_modules/fetch-errors/package.json deleted file mode 100644 index 9c04217..0000000 --- a/node_modules/fetch-errors/package.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "_from": "fetch-errors@^2.0.1", - "_id": "fetch-errors@2.0.1", - "_inBundle": false, - "_integrity": "sha512-U+pPCefRg8OwN0VARqAOCEyJs+kbSusskhW3XQPYCQMBUUNy3VtK9OYyySlybDQmMkZzqxWlGY9SjKCve1saJw==", - "_location": "/fetch-errors", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "fetch-errors@^2.0.1", - "name": "fetch-errors", - "escapedName": "fetch-errors", - "rawSpec": "^2.0.1", - "saveSpec": null, - "fetchSpec": "^2.0.1" - }, - "_requiredBy": [ - "/async-neocities" - ], - "_resolved": "https://registry.npmjs.org/fetch-errors/-/fetch-errors-2.0.1.tgz", - "_shasum": "26616c7c22a0a386155ba8e53af27f98960e47dc", - "_spec": "fetch-errors@^2.0.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "Bret Comnes", - "email": "bcomnes@gmail.com", - "url": "https://bret.io" - }, - "bugs": { - "url": "https://github.com/bcomnes/fetch-errors/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "WIP - nothing to see here", - "devDependencies": { - "dependency-check": "^4.1.0", - "node-fetch": "^2.6.0", - "npm-run-all": "^4.1.5", - "semistandard": "^14.2.0", - "tape": "^4.11.0", - "tape-promise": "^4.0.0" - }, - "homepage": "https://github.com/bcomnes/fetch-errors", - "keywords": [], - "license": "MIT", - "main": "index.js", - "name": "fetch-errors", - "repository": { - "type": "git", - "url": "git+https://github.com/bcomnes/fetch-errors.git" - }, - "scripts": { - "test": "run-s test:*", - "test:deps": "dependency-check . 
--no-dev --no-peer", - "test:semistandard": "semistandard", - "test:tape": "tape test.js" - }, - "semistandard": { - "ignore": [ - "dist" - ] - }, - "version": "2.0.1" -} diff --git a/node_modules/fetch-errors/test.js b/node_modules/fetch-errors/test.js deleted file mode 100644 index de00964..0000000 --- a/node_modules/fetch-errors/test.js +++ /dev/null @@ -1,18 +0,0 @@ -const tape = require('tape'); -const ptape = require('tape-promise').default; -const { handleResponse } = require('.'); -const fetch = require('node-fetch'); -const test = ptape(tape); - -test('handleResponse', async t => { - return fetch('https://api.github.com/users/bcomnes/repos') - .then(response => { - t.ok(response.ok); - return response; - }) - .then(handleResponse) - .then(json => { - t.ok(json); - }) - .catch(t.error); -}); diff --git a/node_modules/form-data/License b/node_modules/form-data/License deleted file mode 100644 index c7ff12a..0000000 --- a/node_modules/form-data/License +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. diff --git a/node_modules/form-data/README.md b/node_modules/form-data/README.md deleted file mode 100644 index 42f630f..0000000 --- a/node_modules/form-data/README.md +++ /dev/null @@ -1,351 +0,0 @@ -# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) - -A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. - -The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
- -[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface - -[![Linux Build](https://img.shields.io/travis/form-data/form-data/v3.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) -[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v3.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) -[![Windows Build](https://img.shields.io/travis/form-data/form-data/v3.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) - -[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v3.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) -[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) - -## Install - -``` -npm install --save form-data -``` - -## Usage - -In this example we are constructing a form with 3 fields that contain a string, -a buffer and a file stream. - -``` javascript -var FormData = require('form-data'); -var fs = require('fs'); - -var form = new FormData(); -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_file', fs.createReadStream('/foo/bar.jpg')); -``` - -Also you can use http-response stream: - -``` javascript -var FormData = require('form-data'); -var http = require('http'); - -var form = new FormData(); - -http.request('http://nodejs.org/images/logo.png', function(response) { - form.append('my_field', 'my value'); - form.append('my_buffer', new Buffer(10)); - form.append('my_logo', response); -}); -``` - -Or @mikeal's [request](https://github.com/request/request) stream: - -``` javascript -var FormData = require('form-data'); -var request = require('request'); - -var form = new FormData(); - -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_logo', request('http://nodejs.org/images/logo.png')); -``` - -In order to submit this form to a web application, call ```submit(url, [callback])``` method: - -``` javascript -form.submit('http://example.org/', function(err, res) { - // res – response object (http.IncomingMessage) // - res.resume(); -}); - -``` - -For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
- -### Custom options - -You can provide custom options, such as `maxDataSize`: - -``` javascript -var FormData = require('form-data'); - -var form = new FormData({ maxDataSize: 20971520 }); -form.append('my_field', 'my value'); -form.append('my_buffer', /* something big */); -``` - -List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) - -### Alternative submission methods - -You can use node's http client interface: - -``` javascript -var http = require('http'); - -var request = http.request({ - method: 'post', - host: 'example.org', - path: '/upload', - headers: form.getHeaders() -}); - -form.pipe(request); - -request.on('response', function(res) { - console.log(res.statusCode); -}); -``` - -Or if you would prefer the `'Content-Length'` header to be set for you: - -``` javascript -form.submit('example.org/upload', function(err, res) { - console.log(res.statusCode); -}); -``` - -To use custom headers and pre-known length in parts: - -``` javascript -var CRLF = '\r\n'; -var form = new FormData(); - -var options = { - header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, - knownLength: 1 -}; - -form.append('my_buffer', buffer, options); - -form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); -}); -``` - -Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: - -``` javascript -someModule.stream(function(err, stdout, stderr) { - if (err) throw err; - - var form = new FormData(); - - form.append('file', stdout, { - filename: 'unicycle.jpg', // ... or: - filepath: 'photos/toys/unicycle.jpg', - contentType: 'image/jpeg', - knownLength: 19806 - }); - - form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); - }); -}); -``` - -The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). - -For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: - -``` javascript -form.submit({ - host: 'example.com', - path: '/probably.php?extra=params', - auth: 'username:password' -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: - -``` javascript -form.submit({ - host: 'example.com', - path: '/surelynot.php', - headers: {'x-test-header': 'test-header-value'} -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -### Methods - -- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
-- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) -- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) -- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) -- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) -- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) -- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) -- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) -- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) - -#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) -Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. -```javascript -var form = new FormData(); -form.append( 'my_string', 'my value' ); -form.append( 'my_integer', 1 ); -form.append( 'my_boolean', true ); -form.append( 'my_buffer', new Buffer(10) ); -form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) -``` - -You may provide a string for options, or an object. -```javascript -// Set filename by providing a string for options -form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); - -// provide an object. -form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); -``` - -#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) -This method ads the correct `content-type` header to the provided array of `userHeaders`. - -#### _String_ getBoundary() -Return the boundary of the formData. A boundary consists of 26 `-` followed by 24 numbers -for example: -```javascript ---------------------------515890814546601021194782 -``` -_Note: The boundary must be unique and may not appear in the data._ - -#### _Buffer_ getBuffer() -Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. -```javascript -var form = new FormData(); -form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); -form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); - -axios.post( 'https://example.com/path/to/api', - form.getBuffer(), - form.getHeaders() - ) -``` -**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. - -#### _Integer_ getLengthSync() -Same as `getLength` but synchronous. - -_Note: getLengthSync __doesn't__ calculate streams length._ - -#### _Integer_ getLength( **function** _callback_ ) -Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated -```javascript -this.getLength(function(err, length) { - if (err) { - this._error(err); - return; - } - - // add content length - request.setHeader('Content-Length', length); - - ... -}.bind(this)); -``` - -#### _Boolean_ hasKnownLength() -Checks if the length of added values is known. - -#### _Request_ submit( _params_, **function** _callback_ ) -Submit the form to a web application. 
-```javascript -var form = new FormData(); -form.append( 'my_string', 'Hello World' ); - -form.submit( 'http://example.com/', function(err, res) { - // res – response object (http.IncomingMessage) // - res.resume(); -} ); -``` - -#### _String_ toString() -Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. - -### Integration with other libraries - -#### Request - -Form submission using [request](https://github.com/request/request): - -```javascript -var formData = { - my_field: 'my_value', - my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), -}; - -request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { - if (err) { - return console.error('upload failed:', err); - } - console.log('Upload successful! Server responded with:', body); -}); -``` - -For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). - -#### node-fetch - -You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): - -```javascript -var form = new FormData(); - -form.append('a', 1); - -fetch('http://example.com', { method: 'POST', body: form }) - .then(function(res) { - return res.json(); - }).then(function(json) { - console.log(json); - }); -``` - -#### axios - -In Node.js you can post a file using [axios](https://github.com/axios/axios): -```javascript -const form = new FormData(); -const stream = fs.createReadStream(PATH_TO_FILE); - -form.append('image', stream); - -// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` -const formHeaders = form.getHeaders(); - -axios.post('http://example.com', form, { - headers: { - ...formHeaders, - }, -}) -.then(response => response) -.catch(error => error) -``` - -## Notes - -- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. -- Starting version `2.x` FormData has dropped support for `node@0.10.x`. -- Starting version `3.x` FormData has dropped support for `node@4.x`. - -## License - -Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/README.md.bak b/node_modules/form-data/README.md.bak deleted file mode 100644 index e9195bd..0000000 --- a/node_modules/form-data/README.md.bak +++ /dev/null @@ -1,351 +0,0 @@ -# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) - -A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. - -The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
- -[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface - -[![Linux Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) -[![MacOS Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) -[![Windows Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) - -[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/master.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) -[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) - -## Install - -``` -npm install --save form-data -``` - -## Usage - -In this example we are constructing a form with 3 fields that contain a string, -a buffer and a file stream. - -``` javascript -var FormData = require('form-data'); -var fs = require('fs'); - -var form = new FormData(); -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_file', fs.createReadStream('/foo/bar.jpg')); -``` - -Also you can use http-response stream: - -``` javascript -var FormData = require('form-data'); -var http = require('http'); - -var form = new FormData(); - -http.request('http://nodejs.org/images/logo.png', function(response) { - form.append('my_field', 'my value'); - form.append('my_buffer', new Buffer(10)); - form.append('my_logo', response); -}); -``` - -Or @mikeal's [request](https://github.com/request/request) stream: - -``` javascript -var FormData = require('form-data'); -var request = require('request'); - -var form = new FormData(); - -form.append('my_field', 'my value'); -form.append('my_buffer', new Buffer(10)); -form.append('my_logo', request('http://nodejs.org/images/logo.png')); -``` - -In order to submit this form to a web application, call ```submit(url, [callback])``` method: - -``` javascript -form.submit('http://example.org/', function(err, res) { - // res – response object (http.IncomingMessage) // - res.resume(); -}); - -``` - -For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
- -### Custom options - -You can provide custom options, such as `maxDataSize`: - -``` javascript -var FormData = require('form-data'); - -var form = new FormData({ maxDataSize: 20971520 }); -form.append('my_field', 'my value'); -form.append('my_buffer', /* something big */); -``` - -List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) - -### Alternative submission methods - -You can use node's http client interface: - -``` javascript -var http = require('http'); - -var request = http.request({ - method: 'post', - host: 'example.org', - path: '/upload', - headers: form.getHeaders() -}); - -form.pipe(request); - -request.on('response', function(res) { - console.log(res.statusCode); -}); -``` - -Or if you would prefer the `'Content-Length'` header to be set for you: - -``` javascript -form.submit('example.org/upload', function(err, res) { - console.log(res.statusCode); -}); -``` - -To use custom headers and pre-known length in parts: - -``` javascript -var CRLF = '\r\n'; -var form = new FormData(); - -var options = { - header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, - knownLength: 1 -}; - -form.append('my_buffer', buffer, options); - -form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); -}); -``` - -Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: - -``` javascript -someModule.stream(function(err, stdout, stderr) { - if (err) throw err; - - var form = new FormData(); - - form.append('file', stdout, { - filename: 'unicycle.jpg', // ... or: - filepath: 'photos/toys/unicycle.jpg', - contentType: 'image/jpeg', - knownLength: 19806 - }); - - form.submit('http://example.com/', function(err, res) { - if (err) throw err; - console.log('Done'); - }); -}); -``` - -The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). - -For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: - -``` javascript -form.submit({ - host: 'example.com', - path: '/probably.php?extra=params', - auth: 'username:password' -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: - -``` javascript -form.submit({ - host: 'example.com', - path: '/surelynot.php', - headers: {'x-test-header': 'test-header-value'} -}, function(err, res) { - console.log(res.statusCode); -}); -``` - -### Methods - -- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
-- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) -- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) -- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) -- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) -- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) -- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) -- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) -- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) - -#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) -Append data to the form. You can submit just about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. -```javascript -var form = new FormData(); -form.append( 'my_string', 'my value' ); -form.append( 'my_integer', 1 ); -form.append( 'my_boolean', true ); -form.append( 'my_buffer', new Buffer(10) ); -form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) -``` - -You may provide a string for options, or an object. -```javascript -// Set filename by providing a string for options -form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); - -// provide an object. -form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); -``` - -#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) -This method adds the correct `content-type` header to the provided array of `userHeaders`. - -#### _String_ getBoundary() -Return the boundary of the formData. A boundary consists of 26 `-` followed by 24 numbers, -for example: -```javascript ---------------------------515890814546601021194782 -``` -_Note: The boundary must be unique and may not appear in the data._ - -#### _Buffer_ getBuffer() -Return the full formdata request package, as a Buffer. You can pass this Buffer to e.g. Axios to send multipart data. -```javascript -var form = new FormData(); -form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); -form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); - -axios.post( 'https://example.com/path/to/api', - form.getBuffer(), - form.getHeaders() - ) -``` -**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. - -#### _Integer_ getLengthSync() -Same as `getLength` but synchronous. - -_Note: getLengthSync __doesn't__ calculate streams length._ - -#### _Integer_ getLength( **function** _callback_ ) -Returns the `Content-Length` asynchronously. The callback is used to handle errors and continue once the length has been calculated. -```javascript -this.getLength(function(err, length) { - if (err) { - this._error(err); - return; - } - - // add content length - request.setHeader('Content-Length', length); - - ... -}.bind(this)); -``` - -#### _Boolean_ hasKnownLength() -Checks if the length of added values is known. - -#### _Request_ submit( _params_, **function** _callback_ ) -Submit the form to a web application.
-```javascript -var form = new FormData(); -form.append( 'my_string', 'Hello World' ); - -form.submit( 'http://example.com/', function(err, res) { - // res – response object (http.IncomingMessage) // - res.resume(); -} ); -``` - -#### _String_ toString() -Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. - -### Integration with other libraries - -#### Request - -Form submission using [request](https://github.com/request/request): - -```javascript -var formData = { - my_field: 'my_value', - my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), -}; - -request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { - if (err) { - return console.error('upload failed:', err); - } - console.log('Upload successful! Server responded with:', body); -}); -``` - -For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). - -#### node-fetch - -You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): - -```javascript -var form = new FormData(); - -form.append('a', 1); - -fetch('http://example.com', { method: 'POST', body: form }) - .then(function(res) { - return res.json(); - }).then(function(json) { - console.log(json); - }); -``` - -#### axios - -In Node.js you can post a file using [axios](https://github.com/axios/axios): -```javascript -const form = new FormData(); -const stream = fs.createReadStream(PATH_TO_FILE); - -form.append('image', stream); - -// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` -const formHeaders = form.getHeaders(); - -axios.post('http://example.com', form, { - headers: { - ...formHeaders, - }, -}) -.then(response => response) -.catch(error => error) -``` - -## Notes - -- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. -- Starting version `2.x` FormData has dropped support for `node@0.10.x`. -- Starting version `3.x` FormData has dropped support for `node@4.x`. - -## License - -Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/index.d.ts b/node_modules/form-data/index.d.ts deleted file mode 100644 index 6e52045..0000000 --- a/node_modules/form-data/index.d.ts +++ /dev/null @@ -1,61 +0,0 @@ -// Definitions by: Carlos Ballesteros Velasco -// Leon Yu -// BendingBender -// Maple Miao - -/// -import * as stream from 'stream'; -import * as http from 'http'; - -export = FormData; - -// Extracted because @types/node doesn't export interfaces. 
-interface ReadableOptions { - highWaterMark?: number; - encoding?: string; - objectMode?: boolean; - read?(this: stream.Readable, size: number): void; - destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; - autoDestroy?: boolean; -} - -interface Options extends ReadableOptions { - writable?: boolean; - readable?: boolean; - dataSize?: number; - maxDataSize?: number; - pauseStreams?: boolean; -} - -declare class FormData extends stream.Readable { - constructor(options?: Options); - append(key: string, value: any, options?: FormData.AppendOptions | string): void; - getHeaders(userHeaders?: FormData.Headers): FormData.Headers; - submit( - params: string | FormData.SubmitOptions, - callback?: (error: Error | null, response: http.IncomingMessage) => void - ): http.ClientRequest; - getBuffer(): Buffer; - getBoundary(): string; - getLength(callback: (err: Error | null, length: number) => void): void; - getLengthSync(): number; - hasKnownLength(): boolean; -} - -declare namespace FormData { - interface Headers { - [key: string]: any; - } - - interface AppendOptions { - header?: string | Headers; - knownLength?: number; - filename?: string; - filepath?: string; - contentType?: string; - } - - interface SubmitOptions extends http.RequestOptions { - protocol?: 'https:' | 'http:'; - } -} diff --git a/node_modules/form-data/lib/browser.js b/node_modules/form-data/lib/browser.js deleted file mode 100644 index 09e7c70..0000000 --- a/node_modules/form-data/lib/browser.js +++ /dev/null @@ -1,2 +0,0 @@ -/* eslint-env browser */ -module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/form-data/lib/form_data.js b/node_modules/form-data/lib/form_data.js deleted file mode 100644 index ddfae2e..0000000 --- a/node_modules/form-data/lib/form_data.js +++ /dev/null @@ -1,494 +0,0 @@ -var CombinedStream = require('combined-stream'); -var util = require('util'); -var path = require('path'); -var http = require('http'); -var https = require('https'); -var parseUrl = require('url').parse; -var fs = require('fs'); -var mime = require('mime-types'); -var asynckit = require('asynckit'); -var populate = require('./populate.js'); - -// Public API -module.exports = FormData; - -// make it a Stream -util.inherits(FormData, CombinedStream); - -/** - * Create readable "multipart/form-data" streams. - * Can be used to submit forms - * and file uploads to other web applications. 
- * - * @constructor - * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream - */ -function FormData(options) { - if (!(this instanceof FormData)) { - return new FormData(options); - } - - this._overheadLength = 0; - this._valueLength = 0; - this._valuesToMeasure = []; - - CombinedStream.call(this); - - options = options || {}; - for (var option in options) { - this[option] = options[option]; - } -} - -FormData.LINE_BREAK = '\r\n'; -FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; - -FormData.prototype.append = function(field, value, options) { - - options = options || {}; - - // allow filename as single option - if (typeof options == 'string') { - options = {filename: options}; - } - - var append = CombinedStream.prototype.append.bind(this); - - // all that streamy business can't handle numbers - if (typeof value == 'number') { - value = '' + value; - } - - // https://github.com/felixge/node-form-data/issues/38 - if (util.isArray(value)) { - // Please convert your array into string - // the way web server expects it - this._error(new Error('Arrays are not supported.')); - return; - } - - var header = this._multiPartHeader(field, value, options); - var footer = this._multiPartFooter(); - - append(header); - append(value); - append(footer); - - // pass along options.knownLength - this._trackLength(header, value, options); -}; - -FormData.prototype._trackLength = function(header, value, options) { - var valueLength = 0; - - // used w/ getLengthSync(), when length is known. - // e.g. for streaming directly from a remote server, - // w/ a known file a size, and not wanting to wait for - // incoming file to finish to get its size. - if (options.knownLength != null) { - valueLength += +options.knownLength; - } else if (Buffer.isBuffer(value)) { - valueLength = value.length; - } else if (typeof value === 'string') { - valueLength = Buffer.byteLength(value); - } - - this._valueLength += valueLength; - - // @check why add CRLF? does this account for custom/multiple CRLFs? - this._overheadLength += - Buffer.byteLength(header) + - FormData.LINE_BREAK.length; - - // empty or either doesn't have path or not an http response - if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { - return; - } - - // no need to bother with the length - if (!options.knownLength) { - this._valuesToMeasure.push(value); - } -}; - -FormData.prototype._lengthRetriever = function(value, callback) { - - if (value.hasOwnProperty('fd')) { - - // take read range into a account - // `end` = Infinity –> read file till the end - // - // TODO: Looks like there is bug in Node fs.createReadStream - // it doesn't respect `end` options without `start` options - // Fix it when node fixes it. - // https://github.com/joyent/node/issues/7819 - if (value.end != undefined && value.end != Infinity && value.start != undefined) { - - // when end specified - // no need to calculate range - // inclusive, starts with 0 - callback(null, value.end + 1 - (value.start ? value.start : 0)); - - // not that fast snoopy - } else { - // still need to fetch file size from fs - fs.stat(value.path, function(err, stat) { - - var fileSize; - - if (err) { - callback(err); - return; - } - - // update final size based on the range options - fileSize = stat.size - (value.start ? 
value.start : 0); - callback(null, fileSize); - }); - } - - // or http response - } else if (value.hasOwnProperty('httpVersion')) { - callback(null, +value.headers['content-length']); - - // or request stream http://github.com/mikeal/request - } else if (value.hasOwnProperty('httpModule')) { - // wait till response come back - value.on('response', function(response) { - value.pause(); - callback(null, +response.headers['content-length']); - }); - value.resume(); - - // something else - } else { - callback('Unknown stream'); - } -}; - -FormData.prototype._multiPartHeader = function(field, value, options) { - // custom header specified (as string)? - // it becomes responsible for boundary - // (e.g. to handle extra CRLFs on .NET servers) - if (typeof options.header == 'string') { - return options.header; - } - - var contentDisposition = this._getContentDisposition(value, options); - var contentType = this._getContentType(value, options); - - var contents = ''; - var headers = { - // add custom disposition as third element or keep it two elements if not - 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), - // if no content type. allow it to be empty array - 'Content-Type': [].concat(contentType || []) - }; - - // allow custom headers. - if (typeof options.header == 'object') { - populate(headers, options.header); - } - - var header; - for (var prop in headers) { - if (!headers.hasOwnProperty(prop)) continue; - header = headers[prop]; - - // skip nullish headers. - if (header == null) { - continue; - } - - // convert all headers to arrays. - if (!Array.isArray(header)) { - header = [header]; - } - - // add non-empty headers. - if (header.length) { - contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; - } - } - - return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; -}; - -FormData.prototype._getContentDisposition = function(value, options) { - - var filename - , contentDisposition - ; - - if (typeof options.filepath === 'string') { - // custom filepath for relative paths - filename = path.normalize(options.filepath).replace(/\\/g, '/'); - } else if (options.filename || value.name || value.path) { - // custom filename take precedence - // formidable and the browser add a name property - // fs- and request- streams have path property - filename = path.basename(options.filename || value.name || value.path); - } else if (value.readable && value.hasOwnProperty('httpVersion')) { - // or try http response - filename = path.basename(value.client._httpMessage.path || ''); - } - - if (filename) { - contentDisposition = 'filename="' + filename + '"'; - } - - return contentDisposition; -}; - -FormData.prototype._getContentType = function(value, options) { - - // use custom content-type above all - var contentType = options.contentType; - - // or try `name` from formidable, browser - if (!contentType && value.name) { - contentType = mime.lookup(value.name); - } - - // or try `path` from fs-, request- streams - if (!contentType && value.path) { - contentType = mime.lookup(value.path); - } - - // or if it's http-reponse - if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { - contentType = value.headers['content-type']; - } - - // or guess it from the filepath or filename - if (!contentType && (options.filepath || options.filename)) { - contentType = mime.lookup(options.filepath || options.filename); - } - - // fallback to the default content type if `value` is not simple value - if 
(!contentType && typeof value == 'object') { - contentType = FormData.DEFAULT_CONTENT_TYPE; - } - - return contentType; -}; - -FormData.prototype._multiPartFooter = function() { - return function(next) { - var footer = FormData.LINE_BREAK; - - var lastPart = (this._streams.length === 0); - if (lastPart) { - footer += this._lastBoundary(); - } - - next(footer); - }.bind(this); -}; - -FormData.prototype._lastBoundary = function() { - return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; -}; - -FormData.prototype.getHeaders = function(userHeaders) { - var header; - var formHeaders = { - 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() - }; - - for (header in userHeaders) { - if (userHeaders.hasOwnProperty(header)) { - formHeaders[header.toLowerCase()] = userHeaders[header]; - } - } - - return formHeaders; -}; - -FormData.prototype.getBoundary = function() { - if (!this._boundary) { - this._generateBoundary(); - } - - return this._boundary; -}; - -FormData.prototype.getBuffer = function() { - var dataBuffer = new Buffer.alloc( 0 ); - var boundary = this.getBoundary(); - - // Create the form content. Add Line breaks to the end of data. - for (var i = 0, len = this._streams.length; i < len; i++) { - if (typeof this._streams[i] !== 'function') { - - // Add content to the buffer. - if(Buffer.isBuffer(this._streams[i])) { - dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); - }else { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); - } - - // Add break after content. - if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); - } - } - } - - // Add the footer and return the Buffer object. - return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); -}; - -FormData.prototype._generateBoundary = function() { - // This generates a 50 character boundary similar to those used by Firefox. - // They are optimized for boyer-moore parsing. - var boundary = '--------------------------'; - for (var i = 0; i < 24; i++) { - boundary += Math.floor(Math.random() * 10).toString(16); - } - - this._boundary = boundary; -}; - -// Note: getLengthSync DOESN'T calculate streams length -// As workaround one can calculate file size manually -// and add it as knownLength option -FormData.prototype.getLengthSync = function() { - var knownLength = this._overheadLength + this._valueLength; - - // Don't get confused, there are 3 "internal" streams for each keyval pair - // so it basically checks if there is any value added to the form - if (this._streams.length) { - knownLength += this._lastBoundary().length; - } - - // https://github.com/form-data/form-data/issues/40 - if (!this.hasKnownLength()) { - // Some async length retrievers are present - // therefore synchronous length calculation is false. 
- // Please use getLength(callback) to get proper length - this._error(new Error('Cannot calculate proper length in synchronous way.')); - } - - return knownLength; -}; - -// Public API to check if length of added values is known -// https://github.com/form-data/form-data/issues/196 -// https://github.com/form-data/form-data/issues/262 -FormData.prototype.hasKnownLength = function() { - var hasKnownLength = true; - - if (this._valuesToMeasure.length) { - hasKnownLength = false; - } - - return hasKnownLength; -}; - -FormData.prototype.getLength = function(cb) { - var knownLength = this._overheadLength + this._valueLength; - - if (this._streams.length) { - knownLength += this._lastBoundary().length; - } - - if (!this._valuesToMeasure.length) { - process.nextTick(cb.bind(this, null, knownLength)); - return; - } - - asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { - if (err) { - cb(err); - return; - } - - values.forEach(function(length) { - knownLength += length; - }); - - cb(null, knownLength); - }); -}; - -FormData.prototype.submit = function(params, cb) { - var request - , options - , defaults = {method: 'post'} - ; - - // parse provided url if it's string - // or treat it as options object - if (typeof params == 'string') { - - params = parseUrl(params); - options = populate({ - port: params.port, - path: params.pathname, - host: params.hostname, - protocol: params.protocol - }, defaults); - - // use custom params - } else { - - options = populate(params, defaults); - // if no port provided use default one - if (!options.port) { - options.port = options.protocol == 'https:' ? 443 : 80; - } - } - - // put that good code in getHeaders to some use - options.headers = this.getHeaders(params.headers); - - // https if specified, fallback to http in any other case - if (options.protocol == 'https:') { - request = https.request(options); - } else { - request = http.request(options); - } - - // get content length and fire away - this.getLength(function(err, length) { - if (err) { - this._error(err); - return; - } - - // add content length - request.setHeader('Content-Length', length); - - this.pipe(request); - if (cb) { - var onResponse; - - var callback = function (error, responce) { - request.removeListener('error', callback); - request.removeListener('response', onResponse); - - return cb.call(this, error, responce); - }; - - onResponse = callback.bind(this, null); - - request.on('error', callback); - request.on('response', onResponse); - } - }.bind(this)); - - return request; -}; - -FormData.prototype._error = function(err) { - if (!this.error) { - this.error = err; - this.pause(); - this.emit('error', err); - } -}; - -FormData.prototype.toString = function () { - return '[object FormData]'; -}; diff --git a/node_modules/form-data/lib/populate.js b/node_modules/form-data/lib/populate.js deleted file mode 100644 index 4d35738..0000000 --- a/node_modules/form-data/lib/populate.js +++ /dev/null @@ -1,10 +0,0 @@ -// populates missing values -module.exports = function(dst, src) { - - Object.keys(src).forEach(function(prop) - { - dst[prop] = dst[prop] || src[prop]; - }); - - return dst; -}; diff --git a/node_modules/form-data/package.json b/node_modules/form-data/package.json deleted file mode 100644 index 2b48c1d..0000000 --- a/node_modules/form-data/package.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "_from": "form-data@^3.0.0", - "_id": "form-data@3.0.0", - "_inBundle": false, - "_integrity": 
"sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==", - "_location": "/form-data", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "form-data@^3.0.0", - "name": "form-data", - "escapedName": "form-data", - "rawSpec": "^3.0.0", - "saveSpec": null, - "fetchSpec": "^3.0.0" - }, - "_requiredBy": [ - "/async-neocities" - ], - "_resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.0.tgz", - "_shasum": "31b7e39c85f1355b7139ee0c647cf0de7f83c682", - "_spec": "form-data@^3.0.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "Felix Geisendörfer", - "email": "felix@debuggable.com", - "url": "http://debuggable.com/" - }, - "browser": "./lib/browser", - "bugs": { - "url": "https://github.com/form-data/form-data/issues" - }, - "bundleDependencies": false, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "deprecated": false, - "description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", - "devDependencies": { - "@types/node": "^12.0.10", - "browserify": "^13.1.1", - "browserify-istanbul": "^2.0.0", - "coveralls": "^3.0.4", - "cross-spawn": "^6.0.5", - "eslint": "^6.0.1", - "fake": "^0.2.2", - "far": "^0.0.7", - "formidable": "^1.0.17", - "in-publish": "^2.0.0", - "is-node-modern": "^1.0.0", - "istanbul": "^0.4.5", - "obake": "^0.1.2", - "pkgfiles": "^2.3.0", - "pre-commit": "^1.1.3", - "puppeteer": "^1.19.0", - "request": "^2.88.0", - "rimraf": "^2.7.1", - "tape": "^4.6.2", - "typescript": "^3.5.2" - }, - "engines": { - "node": ">= 6" - }, - "homepage": "https://github.com/form-data/form-data#readme", - "license": "MIT", - "main": "./lib/form_data", - "name": "form-data", - "pre-commit": [ - "lint", - "ci-test", - "check" - ], - "repository": { - "type": "git", - "url": "git://github.com/form-data/form-data.git" - }, - "scripts": { - "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", - "check": "istanbul check-coverage coverage/coverage*.json", - "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", - "ci-test": "npm run test && npm run browser && npm run report", - "debug": "verbose=1 ./test/run.js", - "files": "pkgfiles --sort=name", - "get-version": "node -e \"console.log(require('./package.json').version)\"", - "lint": "eslint lib/*.js test/*.js test/integration/*.js", - "postpublish": "npm run restore-readme", - "posttest": "istanbul report lcov text", - "predebug": "rimraf coverage test/tmp", - "prepublish": "in-publish && npm run update-readme || not-in-publish", - "pretest": "rimraf coverage test/tmp", - "report": "istanbul report lcov text", - "restore-readme": "mv README.md.bak README.md", - "test": "istanbul cover test/run.js", - "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md" - }, - "typings": "./index.d.ts", - "version": "3.0.0" -} diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE deleted file mode 100644 index dea3013..0000000 --- a/node_modules/inherits/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md deleted file mode 100644 index b1c5665..0000000 --- a/node_modules/inherits/README.md +++ /dev/null @@ -1,42 +0,0 @@ -Browser-friendly inheritance fully compatible with standard node.js -[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). - -This package exports the standard `inherits` from the node.js `util` module in -a node environment, but also provides an alternative browser-friendly -implementation through the [browser -field](https://gist.github.com/shtylman/4339901). The alternative -implementation is a literal copy of the standard one, kept in a standalone -module to avoid requiring `util`. It also has a shim for old -browsers with no `Object.create` support. - -While ensuring you are using the standard `inherits` -implementation in a node.js environment, it allows bundlers such as -[browserify](https://github.com/substack/node-browserify) to avoid -including the full `util` package in your client code if all you need is -just the `inherits` function. This is worthwhile because the browser shim for the `util` -package is large and `inherits` is often the single function you need -from it. - -It's recommended to use this package instead of -`require('util').inherits` for any code that may be used -not only in node.js but in the browser too. - -## usage - -```js -var inherits = require('inherits'); -// then use exactly as the standard one -``` - -## note on version ~1.0 - -Version ~1.0 had a completely different motivation and is compatible -neither with 2.0 nor with the standard node.js `inherits`.
- -If you are using version ~1.0 and planning to switch to ~2.0, be -careful: - -* new version uses `super_` instead of `super` for referencing - superclass -* new version overwrites current prototype while old one preserves any - existing fields on it diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js deleted file mode 100644 index f71f2d9..0000000 --- a/node_modules/inherits/inherits.js +++ /dev/null @@ -1,9 +0,0 @@ -try { - var util = require('util'); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = require('./inherits_browser.js'); -} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js deleted file mode 100644 index 86bbb3d..0000000 --- a/node_modules/inherits/inherits_browser.js +++ /dev/null @@ -1,27 +0,0 @@ -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) - } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } - } -} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json deleted file mode 100644 index 6fe7808..0000000 --- a/node_modules/inherits/package.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "_from": "inherits@^2.0.3", - "_id": "inherits@2.0.4", - "_inBundle": false, - "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "_location": "/inherits", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "inherits@^2.0.3", - "name": "inherits", - "escapedName": "inherits", - "rawSpec": "^2.0.3", - "saveSpec": null, - "fetchSpec": "^2.0.3" - }, - "_requiredBy": [ - "/asn1.js", - "/bl/readable-stream", - "/browserify", - "/browserify-aes", - "/browserify-des", - "/browserify-sign", - "/browserify/readable-stream", - "/chokidar", - "/cipher-base", - "/concat-stream", - "/concat-stream/readable-stream", - "/create-hash", - "/create-hmac", - "/crypto-browserify", - "/des.js", - "/duplexer2/readable-stream", - "/duplexify", - "/elliptic", - "/from2", - "/from2/readable-stream", - "/glob", - "/hash-base", - "/hash.js", - "/htmlparser2", - "/http-errors", - "/hyperquest/readable-stream", - "/hyperquest/through2/readable-stream", - "/labeled-stream-splicer", - "/md5.js", - "/module-deps", - "/module-deps/readable-stream", - "/pumpify", - "/read-only-stream/readable-stream", - "/readable-stream", - "/readdirp/readable-stream", - "/ripemd160", - "/sha.js", - "/split2/readable-stream", - "/stdout-stream/readable-stream", - "/stream-browserify", - "/stream-browserify/readable-stream", - "/stream-combiner2/readable-stream", - "/stream-http", - "/stream-splicer", - "/stream-splicer/readable-stream", - "/through2/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", - "_spec": "inherits@^2.0.3", - "_where": 
"/Users/bret/repos/deploy-to-neocities/node_modules/pumpify", - "browser": "./inherits_browser.js", - "bugs": { - "url": "https://github.com/isaacs/inherits/issues" - }, - "bundleDependencies": false, - "deprecated": false, - "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", - "devDependencies": { - "tap": "^14.2.4" - }, - "files": [ - "inherits.js", - "inherits_browser.js" - ], - "homepage": "https://github.com/isaacs/inherits#readme", - "keywords": [ - "inheritance", - "class", - "klass", - "oop", - "object-oriented", - "inherits", - "browser", - "browserify" - ], - "license": "ISC", - "main": "./inherits.js", - "name": "inherits", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/inherits.git" - }, - "scripts": { - "test": "tap" - }, - "version": "2.0.4" -} diff --git a/node_modules/mime-db/HISTORY.md b/node_modules/mime-db/HISTORY.md deleted file mode 100644 index 8afff37..0000000 --- a/node_modules/mime-db/HISTORY.md +++ /dev/null @@ -1,439 +0,0 @@ -1.43.0 / 2020-01-05 -=================== - - * Add `application/x-keepass2` with extension `.kdbx` - * Add extension `.mxmf` to `audio/mobile-xmf` - * Add extensions from IANA for `application/*+xml` types - * Add new upstream MIME types - -1.42.0 / 2019-09-25 -=================== - - * Add `image/vnd.ms-dds` with extension `.dds` - * Add new upstream MIME types - * Remove compressible from `multipart/mixed` - -1.41.0 / 2019-08-30 -=================== - - * Add new upstream MIME types - * Add `application/toml` with extension `.toml` - * Mark `font/ttf` as compressible - -1.40.0 / 2019-04-20 -=================== - - * Add extensions from IANA for `model/*` types - * Add `text/mdx` with extension `.mdx` - -1.39.0 / 2019-04-04 -=================== - - * Add extensions `.siv` and `.sieve` to `application/sieve` - * Add new upstream MIME types - -1.38.0 / 2019-02-04 -=================== - - * Add extension `.nq` to `application/n-quads` - * Add extension `.nt` to `application/n-triples` - * Add new upstream MIME types - * Mark `text/less` as compressible - -1.37.0 / 2018-10-19 -=================== - - * Add extensions to HEIC image types - * Add new upstream MIME types - -1.36.0 / 2018-08-20 -=================== - - * Add Apple file extensions from IANA - * Add extensions from IANA for `image/*` types - * Add new upstream MIME types - -1.35.0 / 2018-07-15 -=================== - - * Add extension `.owl` to `application/rdf+xml` - * Add new upstream MIME types - - Removes extension `.woff` from `application/font-woff` - -1.34.0 / 2018-06-03 -=================== - - * Add extension `.csl` to `application/vnd.citationstyles.style+xml` - * Add extension `.es` to `application/ecmascript` - * Add new upstream MIME types - * Add `UTF-8` as default charset for `text/turtle` - * Mark all XML-derived types as compressible - -1.33.0 / 2018-02-15 -=================== - - * Add extensions from IANA for `message/*` types - * Add new upstream MIME types - * Fix some incorrect OOXML types - * Remove `application/font-woff2` - -1.32.0 / 2017-11-29 -=================== - - * Add new upstream MIME types - * Update `text/hjson` to registered `application/hjson` - * Add `text/shex` with extension `.shex` - -1.31.0 / 2017-10-25 -=================== - - * Add `application/raml+yaml` with extension `.raml` - * Add `application/wasm` with extension `.wasm` - * Add new `font` type from IANA - * Add new upstream font extensions - * Add new upstream MIME types - * Add extensions for JPEG-2000 images - 
-1.30.0 / 2017-08-27 -=================== - - * Add `application/vnd.ms-outlook` - * Add `application/x-arj` - * Add extension `.mjs` to `application/javascript` - * Add glTF types and extensions - * Add new upstream MIME types - * Add `text/x-org` - * Add VirtualBox MIME types - * Fix `source` records for `video/*` types that are IANA - * Update `font/opentype` to registered `font/otf` - -1.29.0 / 2017-07-10 -=================== - - * Add `application/fido.trusted-apps+json` - * Add extension `.wadl` to `application/vnd.sun.wadl+xml` - * Add new upstream MIME types - * Add `UTF-8` as default charset for `text/css` - -1.28.0 / 2017-05-14 -=================== - - * Add new upstream MIME types - * Add extension `.gz` to `application/gzip` - * Update extensions `.md` and `.markdown` to be `text/markdown` - -1.27.0 / 2017-03-16 -=================== - - * Add new upstream MIME types - * Add `image/apng` with extension `.apng` - -1.26.0 / 2017-01-14 -=================== - - * Add new upstream MIME types - * Add extension `.geojson` to `application/geo+json` - -1.25.0 / 2016-11-11 -=================== - - * Add new upstream MIME types - -1.24.0 / 2016-09-18 -=================== - - * Add `audio/mp3` - * Add new upstream MIME types - -1.23.0 / 2016-05-01 -=================== - - * Add new upstream MIME types - * Add extension `.3gpp` to `audio/3gpp` - -1.22.0 / 2016-02-15 -=================== - - * Add `text/slim` - * Add extension `.rng` to `application/xml` - * Add new upstream MIME types - * Fix extension of `application/dash+xml` to be `.mpd` - * Update primary extension to `.m4a` for `audio/mp4` - -1.21.0 / 2016-01-06 -=================== - - * Add Google document types - * Add new upstream MIME types - -1.20.0 / 2015-11-10 -=================== - - * Add `text/x-suse-ymp` - * Add new upstream MIME types - -1.19.0 / 2015-09-17 -=================== - - * Add `application/vnd.apple.pkpass` - * Add new upstream MIME types - -1.18.0 / 2015-09-03 -=================== - - * Add new upstream MIME types - -1.17.0 / 2015-08-13 -=================== - - * Add `application/x-msdos-program` - * Add `audio/g711-0` - * Add `image/vnd.mozilla.apng` - * Add extension `.exe` to `application/x-msdos-program` - -1.16.0 / 2015-07-29 -=================== - - * Add `application/vnd.uri-map` - -1.15.0 / 2015-07-13 -=================== - - * Add `application/x-httpd-php` - -1.14.0 / 2015-06-25 -=================== - - * Add `application/scim+json` - * Add `application/vnd.3gpp.ussd+xml` - * Add `application/vnd.biopax.rdf+xml` - * Add `text/x-processing` - -1.13.0 / 2015-06-07 -=================== - - * Add nginx as a source - * Add `application/x-cocoa` - * Add `application/x-java-archive-diff` - * Add `application/x-makeself` - * Add `application/x-perl` - * Add `application/x-pilot` - * Add `application/x-redhat-package-manager` - * Add `application/x-sea` - * Add `audio/x-m4a` - * Add `audio/x-realaudio` - * Add `image/x-jng` - * Add `text/mathml` - -1.12.0 / 2015-06-05 -=================== - - * Add `application/bdoc` - * Add `application/vnd.hyperdrive+json` - * Add `application/x-bdoc` - * Add extension `.rtf` to `text/rtf` - -1.11.0 / 2015-05-31 -=================== - - * Add `audio/wav` - * Add `audio/wave` - * Add extension `.litcoffee` to `text/coffeescript` - * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data` - * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install` - -1.10.0 / 2015-05-19 -=================== - - * Add `application/vnd.balsamiq.bmpr` - * Add 
`application/vnd.microsoft.portable-executable` - * Add `application/x-ns-proxy-autoconfig` - -1.9.1 / 2015-04-19 -================== - - * Remove `.json` extension from `application/manifest+json` - - This is causing bugs downstream - -1.9.0 / 2015-04-19 -================== - - * Add `application/manifest+json` - * Add `application/vnd.micro+json` - * Add `image/vnd.zbrush.pcx` - * Add `image/x-ms-bmp` - -1.8.0 / 2015-03-13 -================== - - * Add `application/vnd.citationstyles.style+xml` - * Add `application/vnd.fastcopy-disk-image` - * Add `application/vnd.gov.sk.xmldatacontainer+xml` - * Add extension `.jsonld` to `application/ld+json` - -1.7.0 / 2015-02-08 -================== - - * Add `application/vnd.gerber` - * Add `application/vnd.msa-disk-image` - -1.6.1 / 2015-02-05 -================== - - * Community extensions ownership transferred from `node-mime` - -1.6.0 / 2015-01-29 -================== - - * Add `application/jose` - * Add `application/jose+json` - * Add `application/json-seq` - * Add `application/jwk+json` - * Add `application/jwk-set+json` - * Add `application/jwt` - * Add `application/rdap+json` - * Add `application/vnd.gov.sk.e-form+xml` - * Add `application/vnd.ims.imsccv1p3` - -1.5.0 / 2014-12-30 -================== - - * Add `application/vnd.oracle.resource+json` - * Fix various invalid MIME type entries - - `application/mbox+xml` - - `application/oscp-response` - - `application/vwg-multiplexed` - - `audio/g721` - -1.4.0 / 2014-12-21 -================== - - * Add `application/vnd.ims.imsccv1p2` - * Fix various invalid MIME type entries - - `application/vnd-acucobol` - - `application/vnd-curl` - - `application/vnd-dart` - - `application/vnd-dxr` - - `application/vnd-fdf` - - `application/vnd-mif` - - `application/vnd-sema` - - `application/vnd-wap-wmlc` - - `application/vnd.adobe.flash-movie` - - `application/vnd.dece-zip` - - `application/vnd.dvb_service` - - `application/vnd.micrografx-igx` - - `application/vnd.sealed-doc` - - `application/vnd.sealed-eml` - - `application/vnd.sealed-mht` - - `application/vnd.sealed-ppt` - - `application/vnd.sealed-tiff` - - `application/vnd.sealed-xls` - - `application/vnd.sealedmedia.softseal-html` - - `application/vnd.sealedmedia.softseal-pdf` - - `application/vnd.wap-slc` - - `application/vnd.wap-wbxml` - - `audio/vnd.sealedmedia.softseal-mpeg` - - `image/vnd-djvu` - - `image/vnd-svf` - - `image/vnd-wap-wbmp` - - `image/vnd.sealed-png` - - `image/vnd.sealedmedia.softseal-gif` - - `image/vnd.sealedmedia.softseal-jpg` - - `model/vnd-dwf` - - `model/vnd.parasolid.transmit-binary` - - `model/vnd.parasolid.transmit-text` - - `text/vnd-a` - - `text/vnd-curl` - - `text/vnd.wap-wml` - * Remove example template MIME types - - `application/example` - - `audio/example` - - `image/example` - - `message/example` - - `model/example` - - `multipart/example` - - `text/example` - - `video/example` - -1.3.1 / 2014-12-16 -================== - - * Fix missing extensions - - `application/json5` - - `text/hjson` - -1.3.0 / 2014-12-07 -================== - - * Add `application/a2l` - * Add `application/aml` - * Add `application/atfx` - * Add `application/atxml` - * Add `application/cdfx+xml` - * Add `application/dii` - * Add `application/json5` - * Add `application/lxf` - * Add `application/mf4` - * Add `application/vnd.apache.thrift.compact` - * Add `application/vnd.apache.thrift.json` - * Add `application/vnd.coffeescript` - * Add `application/vnd.enphase.envoy` - * Add `application/vnd.ims.imsccv1p1` - * Add `text/csv-schema` - * Add 
`text/hjson` - * Add `text/markdown` - * Add `text/yaml` - -1.2.0 / 2014-11-09 -================== - - * Add `application/cea` - * Add `application/dit` - * Add `application/vnd.gov.sk.e-form+zip` - * Add `application/vnd.tmd.mediaflex.api+xml` - * Type `application/epub+zip` is now IANA-registered - -1.1.2 / 2014-10-23 -================== - - * Rebuild database for `application/x-www-form-urlencoded` change - -1.1.1 / 2014-10-20 -================== - - * Mark `application/x-www-form-urlencoded` as compressible. - -1.1.0 / 2014-09-28 -================== - - * Add `application/font-woff2` - -1.0.3 / 2014-09-25 -================== - - * Fix engine requirement in package - -1.0.2 / 2014-09-25 -================== - - * Add `application/coap-group+json` - * Add `application/dcd` - * Add `application/vnd.apache.thrift.binary` - * Add `image/vnd.tencent.tap` - * Mark all JSON-derived types as compressible - * Update `text/vtt` data - -1.0.1 / 2014-08-30 -================== - - * Fix extension ordering - -1.0.0 / 2014-08-30 -================== - - * Add `application/atf` - * Add `application/merge-patch+json` - * Add `multipart/x-mixed-replace` - * Add `source: 'apache'` metadata - * Add `source: 'iana'` metadata - * Remove badly-assumed charset data diff --git a/node_modules/mime-db/LICENSE b/node_modules/mime-db/LICENSE deleted file mode 100644 index a7ae8ee..0000000 --- a/node_modules/mime-db/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ - -The MIT License (MIT) - -Copyright (c) 2014 Jonathan Ong me@jongleberry.com - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/mime-db/README.md b/node_modules/mime-db/README.md deleted file mode 100644 index 68e3a1a..0000000 --- a/node_modules/mime-db/README.md +++ /dev/null @@ -1,100 +0,0 @@ -# mime-db - -[![NPM Version][npm-version-image]][npm-url] -[![NPM Downloads][npm-downloads-image]][npm-url] -[![Node.js Version][node-image]][node-url] -[![Build Status][travis-image]][travis-url] -[![Coverage Status][coveralls-image]][coveralls-url] - -This is a database of all mime types. -It consists of a single, public JSON file and does not include any logic, -allowing it to remain as un-opinionated as possible with an API. 
-It aggregates data from the following sources: - -- http://www.iana.org/assignments/media-types/media-types.xhtml -- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types -- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types - -## Installation - -```bash -npm install mime-db -``` - -### Database Download - -If you're crazy enough to use this in the browser, you can just grab the -JSON file using [jsDelivr](https://www.jsdelivr.com/). It is recommended to -replace `master` with [a release tag](https://github.com/jshttp/mime-db/tags) -as the JSON format may change in the future. - -``` -https://cdn.jsdelivr.net/gh/jshttp/mime-db@master/db.json -``` - -## Usage - -```js -var db = require('mime-db'); - -// grab data on .js files -var data = db['application/javascript']; -``` - -## Data Structure - -The JSON file is a map lookup for lowercased mime types. -Each mime type has the following properties: - -- `.source` - where the mime type is defined. - If not set, it's probably a custom media type. - - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) - - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) - - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) -- `.extensions[]` - known extensions associated with this mime type. -- `.compressible` - whether a file of this type can be gzipped. -- `.charset` - the default charset associated with this type, if any. - -If unknown, every property could be `undefined`. - -## Contributing - -To edit the database, only make PRs against `src/custom.json` or -`src/custom-suffix.json`. - -The `src/custom.json` file is a JSON object with the MIME type as the keys -and the values being an object with the following keys: - -- `compressible` - leave out if you don't know, otherwise `true`/`false` to - indicate whether the data represented by the type is typically compressible. -- `extensions` - include an array of file extensions that are associated with - the type. -- `notes` - human-readable notes about the type, typically what the type is. -- `sources` - include an array of URLs of where the MIME type and the associated - extensions are sourced from. This needs to be a [primary source](https://en.wikipedia.org/wiki/Primary_source); - links to type aggregating sites and Wikipedia are _not acceptable_. - -To update the build, run `npm run build`. - -### Adding Custom Media Types - -The best way to get new media types included in this library is to register -them with the IANA. The community registration procedure is outlined in -[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types -registered with the IANA are automatically pulled into this library. - -If that is not possible / feasible, they can be added directly here as a -"custom" type. To do this, it is required to have a primary source that -definitively lists the media type. If an extension is going to be listed as -associateed with this media type, the source must definitively link the -media type and extension as well. 
- -[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-db/master -[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master -[node-image]: https://badgen.net/npm/node/mime-db -[node-url]: https://nodejs.org/en/download -[npm-downloads-image]: https://badgen.net/npm/dm/mime-db -[npm-url]: https://npmjs.org/package/mime-db -[npm-version-image]: https://badgen.net/npm/v/mime-db -[travis-image]: https://badgen.net/travis/jshttp/mime-db/master -[travis-url]: https://travis-ci.org/jshttp/mime-db diff --git a/node_modules/mime-db/db.json b/node_modules/mime-db/db.json deleted file mode 100644 index cfa3c63..0000000 --- a/node_modules/mime-db/db.json +++ /dev/null @@ -1,8060 +0,0 @@ -{ - "application/1d-interleaved-parityfec": { - "source": "iana" - }, - "application/3gpdash-qoe-report+xml": { - "source": "iana", - "compressible": true - }, - "application/3gpp-ims+xml": { - "source": "iana", - "compressible": true - }, - "application/a2l": { - "source": "iana" - }, - "application/activemessage": { - "source": "iana" - }, - "application/activity+json": { - "source": "iana", - "compressible": true - }, - "application/alto-costmap+json": { - "source": "iana", - "compressible": true - }, - "application/alto-costmapfilter+json": { - "source": "iana", - "compressible": true - }, - "application/alto-directory+json": { - "source": "iana", - "compressible": true - }, - "application/alto-endpointcost+json": { - "source": "iana", - "compressible": true - }, - "application/alto-endpointcostparams+json": { - "source": "iana", - "compressible": true - }, - "application/alto-endpointprop+json": { - "source": "iana", - "compressible": true - }, - "application/alto-endpointpropparams+json": { - "source": "iana", - "compressible": true - }, - "application/alto-error+json": { - "source": "iana", - "compressible": true - }, - "application/alto-networkmap+json": { - "source": "iana", - "compressible": true - }, - "application/alto-networkmapfilter+json": { - "source": "iana", - "compressible": true - }, - "application/aml": { - "source": "iana" - }, - "application/andrew-inset": { - "source": "iana", - "extensions": ["ez"] - }, - "application/applefile": { - "source": "iana" - }, - "application/applixware": { - "source": "apache", - "extensions": ["aw"] - }, - "application/atf": { - "source": "iana" - }, - "application/atfx": { - "source": "iana" - }, - "application/atom+xml": { - "source": "iana", - "compressible": true, - "extensions": ["atom"] - }, - "application/atomcat+xml": { - "source": "iana", - "compressible": true, - "extensions": ["atomcat"] - }, - "application/atomdeleted+xml": { - "source": "iana", - "compressible": true, - "extensions": ["atomdeleted"] - }, - "application/atomicmail": { - "source": "iana" - }, - "application/atomsvc+xml": { - "source": "iana", - "compressible": true, - "extensions": ["atomsvc"] - }, - "application/atsc-dwd+xml": { - "source": "iana", - "compressible": true, - "extensions": ["dwd"] - }, - "application/atsc-held+xml": { - "source": "iana", - "compressible": true, - "extensions": ["held"] - }, - "application/atsc-rdt+json": { - "source": "iana", - "compressible": true - }, - "application/atsc-rsat+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rsat"] - }, - "application/atxml": { - "source": "iana" - }, - "application/auth-policy+xml": { - "source": "iana", - "compressible": true - }, - "application/bacnet-xdd+zip": { - "source": "iana", - "compressible": false - }, - "application/batch-smtp": { - "source": "iana" - }, - 
"application/bdoc": { - "compressible": false, - "extensions": ["bdoc"] - }, - "application/beep+xml": { - "source": "iana", - "compressible": true - }, - "application/calendar+json": { - "source": "iana", - "compressible": true - }, - "application/calendar+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xcs"] - }, - "application/call-completion": { - "source": "iana" - }, - "application/cals-1840": { - "source": "iana" - }, - "application/cbor": { - "source": "iana" - }, - "application/cbor-seq": { - "source": "iana" - }, - "application/cccex": { - "source": "iana" - }, - "application/ccmp+xml": { - "source": "iana", - "compressible": true - }, - "application/ccxml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["ccxml"] - }, - "application/cdfx+xml": { - "source": "iana", - "compressible": true, - "extensions": ["cdfx"] - }, - "application/cdmi-capability": { - "source": "iana", - "extensions": ["cdmia"] - }, - "application/cdmi-container": { - "source": "iana", - "extensions": ["cdmic"] - }, - "application/cdmi-domain": { - "source": "iana", - "extensions": ["cdmid"] - }, - "application/cdmi-object": { - "source": "iana", - "extensions": ["cdmio"] - }, - "application/cdmi-queue": { - "source": "iana", - "extensions": ["cdmiq"] - }, - "application/cdni": { - "source": "iana" - }, - "application/cea": { - "source": "iana" - }, - "application/cea-2018+xml": { - "source": "iana", - "compressible": true - }, - "application/cellml+xml": { - "source": "iana", - "compressible": true - }, - "application/cfw": { - "source": "iana" - }, - "application/clue+xml": { - "source": "iana", - "compressible": true - }, - "application/clue_info+xml": { - "source": "iana", - "compressible": true - }, - "application/cms": { - "source": "iana" - }, - "application/cnrp+xml": { - "source": "iana", - "compressible": true - }, - "application/coap-group+json": { - "source": "iana", - "compressible": true - }, - "application/coap-payload": { - "source": "iana" - }, - "application/commonground": { - "source": "iana" - }, - "application/conference-info+xml": { - "source": "iana", - "compressible": true - }, - "application/cose": { - "source": "iana" - }, - "application/cose-key": { - "source": "iana" - }, - "application/cose-key-set": { - "source": "iana" - }, - "application/cpl+xml": { - "source": "iana", - "compressible": true - }, - "application/csrattrs": { - "source": "iana" - }, - "application/csta+xml": { - "source": "iana", - "compressible": true - }, - "application/cstadata+xml": { - "source": "iana", - "compressible": true - }, - "application/csvm+json": { - "source": "iana", - "compressible": true - }, - "application/cu-seeme": { - "source": "apache", - "extensions": ["cu"] - }, - "application/cwt": { - "source": "iana" - }, - "application/cybercash": { - "source": "iana" - }, - "application/dart": { - "compressible": true - }, - "application/dash+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mpd"] - }, - "application/dashdelta": { - "source": "iana" - }, - "application/davmount+xml": { - "source": "iana", - "compressible": true, - "extensions": ["davmount"] - }, - "application/dca-rft": { - "source": "iana" - }, - "application/dcd": { - "source": "iana" - }, - "application/dec-dx": { - "source": "iana" - }, - "application/dialog-info+xml": { - "source": "iana", - "compressible": true - }, - "application/dicom": { - "source": "iana" - }, - "application/dicom+json": { - "source": "iana", - "compressible": true - }, - "application/dicom+xml": 
{ - "source": "iana", - "compressible": true - }, - "application/dii": { - "source": "iana" - }, - "application/dit": { - "source": "iana" - }, - "application/dns": { - "source": "iana" - }, - "application/dns+json": { - "source": "iana", - "compressible": true - }, - "application/dns-message": { - "source": "iana" - }, - "application/docbook+xml": { - "source": "apache", - "compressible": true, - "extensions": ["dbk"] - }, - "application/dskpp+xml": { - "source": "iana", - "compressible": true - }, - "application/dssc+der": { - "source": "iana", - "extensions": ["dssc"] - }, - "application/dssc+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xdssc"] - }, - "application/dvcs": { - "source": "iana" - }, - "application/ecmascript": { - "source": "iana", - "compressible": true, - "extensions": ["ecma","es"] - }, - "application/edi-consent": { - "source": "iana" - }, - "application/edi-x12": { - "source": "iana", - "compressible": false - }, - "application/edifact": { - "source": "iana", - "compressible": false - }, - "application/efi": { - "source": "iana" - }, - "application/emergencycalldata.comment+xml": { - "source": "iana", - "compressible": true - }, - "application/emergencycalldata.control+xml": { - "source": "iana", - "compressible": true - }, - "application/emergencycalldata.deviceinfo+xml": { - "source": "iana", - "compressible": true - }, - "application/emergencycalldata.ecall.msd": { - "source": "iana" - }, - "application/emergencycalldata.providerinfo+xml": { - "source": "iana", - "compressible": true - }, - "application/emergencycalldata.serviceinfo+xml": { - "source": "iana", - "compressible": true - }, - "application/emergencycalldata.subscriberinfo+xml": { - "source": "iana", - "compressible": true - }, - "application/emergencycalldata.veds+xml": { - "source": "iana", - "compressible": true - }, - "application/emma+xml": { - "source": "iana", - "compressible": true, - "extensions": ["emma"] - }, - "application/emotionml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["emotionml"] - }, - "application/encaprtp": { - "source": "iana" - }, - "application/epp+xml": { - "source": "iana", - "compressible": true - }, - "application/epub+zip": { - "source": "iana", - "compressible": false, - "extensions": ["epub"] - }, - "application/eshop": { - "source": "iana" - }, - "application/exi": { - "source": "iana", - "extensions": ["exi"] - }, - "application/expect-ct-report+json": { - "source": "iana", - "compressible": true - }, - "application/fastinfoset": { - "source": "iana" - }, - "application/fastsoap": { - "source": "iana" - }, - "application/fdt+xml": { - "source": "iana", - "compressible": true, - "extensions": ["fdt"] - }, - "application/fhir+json": { - "source": "iana", - "compressible": true - }, - "application/fhir+xml": { - "source": "iana", - "compressible": true - }, - "application/fido.trusted-apps+json": { - "compressible": true - }, - "application/fits": { - "source": "iana" - }, - "application/flexfec": { - "source": "iana" - }, - "application/font-sfnt": { - "source": "iana" - }, - "application/font-tdpfr": { - "source": "iana", - "extensions": ["pfr"] - }, - "application/font-woff": { - "source": "iana", - "compressible": false - }, - "application/framework-attributes+xml": { - "source": "iana", - "compressible": true - }, - "application/geo+json": { - "source": "iana", - "compressible": true, - "extensions": ["geojson"] - }, - "application/geo+json-seq": { - "source": "iana" - }, - "application/geopackage+sqlite3": { - 
"source": "iana" - }, - "application/geoxacml+xml": { - "source": "iana", - "compressible": true - }, - "application/gltf-buffer": { - "source": "iana" - }, - "application/gml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["gml"] - }, - "application/gpx+xml": { - "source": "apache", - "compressible": true, - "extensions": ["gpx"] - }, - "application/gxf": { - "source": "apache", - "extensions": ["gxf"] - }, - "application/gzip": { - "source": "iana", - "compressible": false, - "extensions": ["gz"] - }, - "application/h224": { - "source": "iana" - }, - "application/held+xml": { - "source": "iana", - "compressible": true - }, - "application/hjson": { - "extensions": ["hjson"] - }, - "application/http": { - "source": "iana" - }, - "application/hyperstudio": { - "source": "iana", - "extensions": ["stk"] - }, - "application/ibe-key-request+xml": { - "source": "iana", - "compressible": true - }, - "application/ibe-pkg-reply+xml": { - "source": "iana", - "compressible": true - }, - "application/ibe-pp-data": { - "source": "iana" - }, - "application/iges": { - "source": "iana" - }, - "application/im-iscomposing+xml": { - "source": "iana", - "compressible": true - }, - "application/index": { - "source": "iana" - }, - "application/index.cmd": { - "source": "iana" - }, - "application/index.obj": { - "source": "iana" - }, - "application/index.response": { - "source": "iana" - }, - "application/index.vnd": { - "source": "iana" - }, - "application/inkml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["ink","inkml"] - }, - "application/iotp": { - "source": "iana" - }, - "application/ipfix": { - "source": "iana", - "extensions": ["ipfix"] - }, - "application/ipp": { - "source": "iana" - }, - "application/isup": { - "source": "iana" - }, - "application/its+xml": { - "source": "iana", - "compressible": true, - "extensions": ["its"] - }, - "application/java-archive": { - "source": "apache", - "compressible": false, - "extensions": ["jar","war","ear"] - }, - "application/java-serialized-object": { - "source": "apache", - "compressible": false, - "extensions": ["ser"] - }, - "application/java-vm": { - "source": "apache", - "compressible": false, - "extensions": ["class"] - }, - "application/javascript": { - "source": "iana", - "charset": "UTF-8", - "compressible": true, - "extensions": ["js","mjs"] - }, - "application/jf2feed+json": { - "source": "iana", - "compressible": true - }, - "application/jose": { - "source": "iana" - }, - "application/jose+json": { - "source": "iana", - "compressible": true - }, - "application/jrd+json": { - "source": "iana", - "compressible": true - }, - "application/json": { - "source": "iana", - "charset": "UTF-8", - "compressible": true, - "extensions": ["json","map"] - }, - "application/json-patch+json": { - "source": "iana", - "compressible": true - }, - "application/json-seq": { - "source": "iana" - }, - "application/json5": { - "extensions": ["json5"] - }, - "application/jsonml+json": { - "source": "apache", - "compressible": true, - "extensions": ["jsonml"] - }, - "application/jwk+json": { - "source": "iana", - "compressible": true - }, - "application/jwk-set+json": { - "source": "iana", - "compressible": true - }, - "application/jwt": { - "source": "iana" - }, - "application/kpml-request+xml": { - "source": "iana", - "compressible": true - }, - "application/kpml-response+xml": { - "source": "iana", - "compressible": true - }, - "application/ld+json": { - "source": "iana", - "compressible": true, - "extensions": ["jsonld"] - }, - 
"application/lgr+xml": { - "source": "iana", - "compressible": true, - "extensions": ["lgr"] - }, - "application/link-format": { - "source": "iana" - }, - "application/load-control+xml": { - "source": "iana", - "compressible": true - }, - "application/lost+xml": { - "source": "iana", - "compressible": true, - "extensions": ["lostxml"] - }, - "application/lostsync+xml": { - "source": "iana", - "compressible": true - }, - "application/lxf": { - "source": "iana" - }, - "application/mac-binhex40": { - "source": "iana", - "extensions": ["hqx"] - }, - "application/mac-compactpro": { - "source": "apache", - "extensions": ["cpt"] - }, - "application/macwriteii": { - "source": "iana" - }, - "application/mads+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mads"] - }, - "application/manifest+json": { - "charset": "UTF-8", - "compressible": true, - "extensions": ["webmanifest"] - }, - "application/marc": { - "source": "iana", - "extensions": ["mrc"] - }, - "application/marcxml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mrcx"] - }, - "application/mathematica": { - "source": "iana", - "extensions": ["ma","nb","mb"] - }, - "application/mathml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mathml"] - }, - "application/mathml-content+xml": { - "source": "iana", - "compressible": true - }, - "application/mathml-presentation+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-associated-procedure-description+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-deregister+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-envelope+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-msk+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-msk-response+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-protection-description+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-reception-report+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-register+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-register-response+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-schedule+xml": { - "source": "iana", - "compressible": true - }, - "application/mbms-user-service-description+xml": { - "source": "iana", - "compressible": true - }, - "application/mbox": { - "source": "iana", - "extensions": ["mbox"] - }, - "application/media-policy-dataset+xml": { - "source": "iana", - "compressible": true - }, - "application/media_control+xml": { - "source": "iana", - "compressible": true - }, - "application/mediaservercontrol+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mscml"] - }, - "application/merge-patch+json": { - "source": "iana", - "compressible": true - }, - "application/metalink+xml": { - "source": "apache", - "compressible": true, - "extensions": ["metalink"] - }, - "application/metalink4+xml": { - "source": "iana", - "compressible": true, - "extensions": ["meta4"] - }, - "application/mets+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mets"] - }, - "application/mf4": { - "source": "iana" - }, - "application/mikey": { - "source": "iana" - }, - "application/mipc": { - "source": "iana" - }, - "application/mmt-aei+xml": { - "source": "iana", - "compressible": true, - "extensions": ["maei"] - }, - "application/mmt-usd+xml": { - "source": "iana", - 
"compressible": true, - "extensions": ["musd"] - }, - "application/mods+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mods"] - }, - "application/moss-keys": { - "source": "iana" - }, - "application/moss-signature": { - "source": "iana" - }, - "application/mosskey-data": { - "source": "iana" - }, - "application/mosskey-request": { - "source": "iana" - }, - "application/mp21": { - "source": "iana", - "extensions": ["m21","mp21"] - }, - "application/mp4": { - "source": "iana", - "extensions": ["mp4s","m4p"] - }, - "application/mpeg4-generic": { - "source": "iana" - }, - "application/mpeg4-iod": { - "source": "iana" - }, - "application/mpeg4-iod-xmt": { - "source": "iana" - }, - "application/mrb-consumer+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xdf"] - }, - "application/mrb-publish+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xdf"] - }, - "application/msc-ivr+xml": { - "source": "iana", - "compressible": true - }, - "application/msc-mixer+xml": { - "source": "iana", - "compressible": true - }, - "application/msword": { - "source": "iana", - "compressible": false, - "extensions": ["doc","dot"] - }, - "application/mud+json": { - "source": "iana", - "compressible": true - }, - "application/multipart-core": { - "source": "iana" - }, - "application/mxf": { - "source": "iana", - "extensions": ["mxf"] - }, - "application/n-quads": { - "source": "iana", - "extensions": ["nq"] - }, - "application/n-triples": { - "source": "iana", - "extensions": ["nt"] - }, - "application/nasdata": { - "source": "iana" - }, - "application/news-checkgroups": { - "source": "iana" - }, - "application/news-groupinfo": { - "source": "iana" - }, - "application/news-transmission": { - "source": "iana" - }, - "application/nlsml+xml": { - "source": "iana", - "compressible": true - }, - "application/node": { - "source": "iana" - }, - "application/nss": { - "source": "iana" - }, - "application/ocsp-request": { - "source": "iana" - }, - "application/ocsp-response": { - "source": "iana" - }, - "application/octet-stream": { - "source": "iana", - "compressible": false, - "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"] - }, - "application/oda": { - "source": "iana", - "extensions": ["oda"] - }, - "application/odm+xml": { - "source": "iana", - "compressible": true - }, - "application/odx": { - "source": "iana" - }, - "application/oebps-package+xml": { - "source": "iana", - "compressible": true, - "extensions": ["opf"] - }, - "application/ogg": { - "source": "iana", - "compressible": false, - "extensions": ["ogx"] - }, - "application/omdoc+xml": { - "source": "apache", - "compressible": true, - "extensions": ["omdoc"] - }, - "application/onenote": { - "source": "apache", - "extensions": ["onetoc","onetoc2","onetmp","onepkg"] - }, - "application/oscore": { - "source": "iana" - }, - "application/oxps": { - "source": "iana", - "extensions": ["oxps"] - }, - "application/p2p-overlay+xml": { - "source": "iana", - "compressible": true, - "extensions": ["relo"] - }, - "application/parityfec": { - "source": "iana" - }, - "application/passport": { - "source": "iana" - }, - "application/patch-ops-error+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xer"] - }, - "application/pdf": { - "source": "iana", - "compressible": false, - "extensions": ["pdf"] - }, - "application/pdx": { - "source": "iana" - }, - "application/pem-certificate-chain": { 
- "source": "iana" - }, - "application/pgp-encrypted": { - "source": "iana", - "compressible": false, - "extensions": ["pgp"] - }, - "application/pgp-keys": { - "source": "iana" - }, - "application/pgp-signature": { - "source": "iana", - "extensions": ["asc","sig"] - }, - "application/pics-rules": { - "source": "apache", - "extensions": ["prf"] - }, - "application/pidf+xml": { - "source": "iana", - "compressible": true - }, - "application/pidf-diff+xml": { - "source": "iana", - "compressible": true - }, - "application/pkcs10": { - "source": "iana", - "extensions": ["p10"] - }, - "application/pkcs12": { - "source": "iana" - }, - "application/pkcs7-mime": { - "source": "iana", - "extensions": ["p7m","p7c"] - }, - "application/pkcs7-signature": { - "source": "iana", - "extensions": ["p7s"] - }, - "application/pkcs8": { - "source": "iana", - "extensions": ["p8"] - }, - "application/pkcs8-encrypted": { - "source": "iana" - }, - "application/pkix-attr-cert": { - "source": "iana", - "extensions": ["ac"] - }, - "application/pkix-cert": { - "source": "iana", - "extensions": ["cer"] - }, - "application/pkix-crl": { - "source": "iana", - "extensions": ["crl"] - }, - "application/pkix-pkipath": { - "source": "iana", - "extensions": ["pkipath"] - }, - "application/pkixcmp": { - "source": "iana", - "extensions": ["pki"] - }, - "application/pls+xml": { - "source": "iana", - "compressible": true, - "extensions": ["pls"] - }, - "application/poc-settings+xml": { - "source": "iana", - "compressible": true - }, - "application/postscript": { - "source": "iana", - "compressible": true, - "extensions": ["ai","eps","ps"] - }, - "application/ppsp-tracker+json": { - "source": "iana", - "compressible": true - }, - "application/problem+json": { - "source": "iana", - "compressible": true - }, - "application/problem+xml": { - "source": "iana", - "compressible": true - }, - "application/provenance+xml": { - "source": "iana", - "compressible": true, - "extensions": ["provx"] - }, - "application/prs.alvestrand.titrax-sheet": { - "source": "iana" - }, - "application/prs.cww": { - "source": "iana", - "extensions": ["cww"] - }, - "application/prs.hpub+zip": { - "source": "iana", - "compressible": false - }, - "application/prs.nprend": { - "source": "iana" - }, - "application/prs.plucker": { - "source": "iana" - }, - "application/prs.rdf-xml-crypt": { - "source": "iana" - }, - "application/prs.xsf+xml": { - "source": "iana", - "compressible": true - }, - "application/pskc+xml": { - "source": "iana", - "compressible": true, - "extensions": ["pskcxml"] - }, - "application/qsig": { - "source": "iana" - }, - "application/raml+yaml": { - "compressible": true, - "extensions": ["raml"] - }, - "application/raptorfec": { - "source": "iana" - }, - "application/rdap+json": { - "source": "iana", - "compressible": true - }, - "application/rdf+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rdf","owl"] - }, - "application/reginfo+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rif"] - }, - "application/relax-ng-compact-syntax": { - "source": "iana", - "extensions": ["rnc"] - }, - "application/remote-printing": { - "source": "iana" - }, - "application/reputon+json": { - "source": "iana", - "compressible": true - }, - "application/resource-lists+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rl"] - }, - "application/resource-lists-diff+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rld"] - }, - "application/rfc+xml": { - "source": "iana", - 
"compressible": true - }, - "application/riscos": { - "source": "iana" - }, - "application/rlmi+xml": { - "source": "iana", - "compressible": true - }, - "application/rls-services+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rs"] - }, - "application/route-apd+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rapd"] - }, - "application/route-s-tsid+xml": { - "source": "iana", - "compressible": true, - "extensions": ["sls"] - }, - "application/route-usd+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rusd"] - }, - "application/rpki-ghostbusters": { - "source": "iana", - "extensions": ["gbr"] - }, - "application/rpki-manifest": { - "source": "iana", - "extensions": ["mft"] - }, - "application/rpki-publication": { - "source": "iana" - }, - "application/rpki-roa": { - "source": "iana", - "extensions": ["roa"] - }, - "application/rpki-updown": { - "source": "iana" - }, - "application/rsd+xml": { - "source": "apache", - "compressible": true, - "extensions": ["rsd"] - }, - "application/rss+xml": { - "source": "apache", - "compressible": true, - "extensions": ["rss"] - }, - "application/rtf": { - "source": "iana", - "compressible": true, - "extensions": ["rtf"] - }, - "application/rtploopback": { - "source": "iana" - }, - "application/rtx": { - "source": "iana" - }, - "application/samlassertion+xml": { - "source": "iana", - "compressible": true - }, - "application/samlmetadata+xml": { - "source": "iana", - "compressible": true - }, - "application/sbml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["sbml"] - }, - "application/scaip+xml": { - "source": "iana", - "compressible": true - }, - "application/scim+json": { - "source": "iana", - "compressible": true - }, - "application/scvp-cv-request": { - "source": "iana", - "extensions": ["scq"] - }, - "application/scvp-cv-response": { - "source": "iana", - "extensions": ["scs"] - }, - "application/scvp-vp-request": { - "source": "iana", - "extensions": ["spq"] - }, - "application/scvp-vp-response": { - "source": "iana", - "extensions": ["spp"] - }, - "application/sdp": { - "source": "iana", - "extensions": ["sdp"] - }, - "application/secevent+jwt": { - "source": "iana" - }, - "application/senml+cbor": { - "source": "iana" - }, - "application/senml+json": { - "source": "iana", - "compressible": true - }, - "application/senml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["senmlx"] - }, - "application/senml-exi": { - "source": "iana" - }, - "application/sensml+cbor": { - "source": "iana" - }, - "application/sensml+json": { - "source": "iana", - "compressible": true - }, - "application/sensml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["sensmlx"] - }, - "application/sensml-exi": { - "source": "iana" - }, - "application/sep+xml": { - "source": "iana", - "compressible": true - }, - "application/sep-exi": { - "source": "iana" - }, - "application/session-info": { - "source": "iana" - }, - "application/set-payment": { - "source": "iana" - }, - "application/set-payment-initiation": { - "source": "iana", - "extensions": ["setpay"] - }, - "application/set-registration": { - "source": "iana" - }, - "application/set-registration-initiation": { - "source": "iana", - "extensions": ["setreg"] - }, - "application/sgml": { - "source": "iana" - }, - "application/sgml-open-catalog": { - "source": "iana" - }, - "application/shf+xml": { - "source": "iana", - "compressible": true, - "extensions": ["shf"] - }, - "application/sieve": { - 
"source": "iana", - "extensions": ["siv","sieve"] - }, - "application/simple-filter+xml": { - "source": "iana", - "compressible": true - }, - "application/simple-message-summary": { - "source": "iana" - }, - "application/simplesymbolcontainer": { - "source": "iana" - }, - "application/sipc": { - "source": "iana" - }, - "application/slate": { - "source": "iana" - }, - "application/smil": { - "source": "iana" - }, - "application/smil+xml": { - "source": "iana", - "compressible": true, - "extensions": ["smi","smil"] - }, - "application/smpte336m": { - "source": "iana" - }, - "application/soap+fastinfoset": { - "source": "iana" - }, - "application/soap+xml": { - "source": "iana", - "compressible": true - }, - "application/sparql-query": { - "source": "iana", - "extensions": ["rq"] - }, - "application/sparql-results+xml": { - "source": "iana", - "compressible": true, - "extensions": ["srx"] - }, - "application/spirits-event+xml": { - "source": "iana", - "compressible": true - }, - "application/sql": { - "source": "iana" - }, - "application/srgs": { - "source": "iana", - "extensions": ["gram"] - }, - "application/srgs+xml": { - "source": "iana", - "compressible": true, - "extensions": ["grxml"] - }, - "application/sru+xml": { - "source": "iana", - "compressible": true, - "extensions": ["sru"] - }, - "application/ssdl+xml": { - "source": "apache", - "compressible": true, - "extensions": ["ssdl"] - }, - "application/ssml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["ssml"] - }, - "application/stix+json": { - "source": "iana", - "compressible": true - }, - "application/swid+xml": { - "source": "iana", - "compressible": true, - "extensions": ["swidtag"] - }, - "application/tamp-apex-update": { - "source": "iana" - }, - "application/tamp-apex-update-confirm": { - "source": "iana" - }, - "application/tamp-community-update": { - "source": "iana" - }, - "application/tamp-community-update-confirm": { - "source": "iana" - }, - "application/tamp-error": { - "source": "iana" - }, - "application/tamp-sequence-adjust": { - "source": "iana" - }, - "application/tamp-sequence-adjust-confirm": { - "source": "iana" - }, - "application/tamp-status-query": { - "source": "iana" - }, - "application/tamp-status-response": { - "source": "iana" - }, - "application/tamp-update": { - "source": "iana" - }, - "application/tamp-update-confirm": { - "source": "iana" - }, - "application/tar": { - "compressible": true - }, - "application/taxii+json": { - "source": "iana", - "compressible": true - }, - "application/tei+xml": { - "source": "iana", - "compressible": true, - "extensions": ["tei","teicorpus"] - }, - "application/tetra_isi": { - "source": "iana" - }, - "application/thraud+xml": { - "source": "iana", - "compressible": true, - "extensions": ["tfi"] - }, - "application/timestamp-query": { - "source": "iana" - }, - "application/timestamp-reply": { - "source": "iana" - }, - "application/timestamped-data": { - "source": "iana", - "extensions": ["tsd"] - }, - "application/tlsrpt+gzip": { - "source": "iana" - }, - "application/tlsrpt+json": { - "source": "iana", - "compressible": true - }, - "application/tnauthlist": { - "source": "iana" - }, - "application/toml": { - "compressible": true, - "extensions": ["toml"] - }, - "application/trickle-ice-sdpfrag": { - "source": "iana" - }, - "application/trig": { - "source": "iana" - }, - "application/ttml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["ttml"] - }, - "application/tve-trigger": { - "source": "iana" - }, - 
"application/tzif": { - "source": "iana" - }, - "application/tzif-leap": { - "source": "iana" - }, - "application/ulpfec": { - "source": "iana" - }, - "application/urc-grpsheet+xml": { - "source": "iana", - "compressible": true - }, - "application/urc-ressheet+xml": { - "source": "iana", - "compressible": true, - "extensions": ["rsheet"] - }, - "application/urc-targetdesc+xml": { - "source": "iana", - "compressible": true - }, - "application/urc-uisocketdesc+xml": { - "source": "iana", - "compressible": true - }, - "application/vcard+json": { - "source": "iana", - "compressible": true - }, - "application/vcard+xml": { - "source": "iana", - "compressible": true - }, - "application/vemmi": { - "source": "iana" - }, - "application/vividence.scriptfile": { - "source": "apache" - }, - "application/vnd.1000minds.decision-model+xml": { - "source": "iana", - "compressible": true, - "extensions": ["1km"] - }, - "application/vnd.3gpp-prose+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp-prose-pc3ch+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp-v2x-local-service-information": { - "source": "iana" - }, - "application/vnd.3gpp.access-transfer-events+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.bsf+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.gmop+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mc-signalling-ear": { - "source": "iana" - }, - "application/vnd.3gpp.mcdata-affiliation-command+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcdata-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcdata-payload": { - "source": "iana" - }, - "application/vnd.3gpp.mcdata-service-config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcdata-signalling": { - "source": "iana" - }, - "application/vnd.3gpp.mcdata-ue-config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcdata-user-profile+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-affiliation-command+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-floor-request+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-location-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-mbms-usage-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-service-config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-signed+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-ue-config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-ue-init-config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcptt-user-profile+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-affiliation-command+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-affiliation-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-location-info+xml": { - "source": "iana", - "compressible": true - }, - 
"application/vnd.3gpp.mcvideo-mbms-usage-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-service-config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-transmission-request+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-ue-config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mcvideo-user-profile+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.mid-call+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.pic-bw-large": { - "source": "iana", - "extensions": ["plb"] - }, - "application/vnd.3gpp.pic-bw-small": { - "source": "iana", - "extensions": ["psb"] - }, - "application/vnd.3gpp.pic-bw-var": { - "source": "iana", - "extensions": ["pvb"] - }, - "application/vnd.3gpp.sms": { - "source": "iana" - }, - "application/vnd.3gpp.sms+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.srvcc-ext+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.srvcc-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.state-and-event-info+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp.ussd+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp2.bcmcsinfo+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.3gpp2.sms": { - "source": "iana" - }, - "application/vnd.3gpp2.tcap": { - "source": "iana", - "extensions": ["tcap"] - }, - "application/vnd.3lightssoftware.imagescal": { - "source": "iana" - }, - "application/vnd.3m.post-it-notes": { - "source": "iana", - "extensions": ["pwn"] - }, - "application/vnd.accpac.simply.aso": { - "source": "iana", - "extensions": ["aso"] - }, - "application/vnd.accpac.simply.imp": { - "source": "iana", - "extensions": ["imp"] - }, - "application/vnd.acucobol": { - "source": "iana", - "extensions": ["acu"] - }, - "application/vnd.acucorp": { - "source": "iana", - "extensions": ["atc","acutc"] - }, - "application/vnd.adobe.air-application-installer-package+zip": { - "source": "apache", - "compressible": false, - "extensions": ["air"] - }, - "application/vnd.adobe.flash.movie": { - "source": "iana" - }, - "application/vnd.adobe.formscentral.fcdt": { - "source": "iana", - "extensions": ["fcdt"] - }, - "application/vnd.adobe.fxp": { - "source": "iana", - "extensions": ["fxp","fxpl"] - }, - "application/vnd.adobe.partial-upload": { - "source": "iana" - }, - "application/vnd.adobe.xdp+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xdp"] - }, - "application/vnd.adobe.xfdf": { - "source": "iana", - "extensions": ["xfdf"] - }, - "application/vnd.aether.imp": { - "source": "iana" - }, - "application/vnd.afpc.afplinedata": { - "source": "iana" - }, - "application/vnd.afpc.afplinedata-pagedef": { - "source": "iana" - }, - "application/vnd.afpc.foca-charset": { - "source": "iana" - }, - "application/vnd.afpc.foca-codedfont": { - "source": "iana" - }, - "application/vnd.afpc.foca-codepage": { - "source": "iana" - }, - "application/vnd.afpc.modca": { - "source": "iana" - }, - "application/vnd.afpc.modca-formdef": { - "source": "iana" - }, - "application/vnd.afpc.modca-mediummap": { - "source": "iana" - }, - "application/vnd.afpc.modca-objectcontainer": { - "source": "iana" - }, - "application/vnd.afpc.modca-overlay": { - "source": "iana" - }, - "application/vnd.afpc.modca-pagesegment": { - 
"source": "iana" - }, - "application/vnd.ah-barcode": { - "source": "iana" - }, - "application/vnd.ahead.space": { - "source": "iana", - "extensions": ["ahead"] - }, - "application/vnd.airzip.filesecure.azf": { - "source": "iana", - "extensions": ["azf"] - }, - "application/vnd.airzip.filesecure.azs": { - "source": "iana", - "extensions": ["azs"] - }, - "application/vnd.amadeus+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.amazon.ebook": { - "source": "apache", - "extensions": ["azw"] - }, - "application/vnd.amazon.mobi8-ebook": { - "source": "iana" - }, - "application/vnd.americandynamics.acc": { - "source": "iana", - "extensions": ["acc"] - }, - "application/vnd.amiga.ami": { - "source": "iana", - "extensions": ["ami"] - }, - "application/vnd.amundsen.maze+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.android.ota": { - "source": "iana" - }, - "application/vnd.android.package-archive": { - "source": "apache", - "compressible": false, - "extensions": ["apk"] - }, - "application/vnd.anki": { - "source": "iana" - }, - "application/vnd.anser-web-certificate-issue-initiation": { - "source": "iana", - "extensions": ["cii"] - }, - "application/vnd.anser-web-funds-transfer-initiation": { - "source": "apache", - "extensions": ["fti"] - }, - "application/vnd.antix.game-component": { - "source": "iana", - "extensions": ["atx"] - }, - "application/vnd.apache.thrift.binary": { - "source": "iana" - }, - "application/vnd.apache.thrift.compact": { - "source": "iana" - }, - "application/vnd.apache.thrift.json": { - "source": "iana" - }, - "application/vnd.api+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.aplextor.warrp+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.apothekende.reservation+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.apple.installer+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mpkg"] - }, - "application/vnd.apple.keynote": { - "source": "iana", - "extensions": ["keynote"] - }, - "application/vnd.apple.mpegurl": { - "source": "iana", - "extensions": ["m3u8"] - }, - "application/vnd.apple.numbers": { - "source": "iana", - "extensions": ["numbers"] - }, - "application/vnd.apple.pages": { - "source": "iana", - "extensions": ["pages"] - }, - "application/vnd.apple.pkpass": { - "compressible": false, - "extensions": ["pkpass"] - }, - "application/vnd.arastra.swi": { - "source": "iana" - }, - "application/vnd.aristanetworks.swi": { - "source": "iana", - "extensions": ["swi"] - }, - "application/vnd.artisan+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.artsquare": { - "source": "iana" - }, - "application/vnd.astraea-software.iota": { - "source": "iana", - "extensions": ["iota"] - }, - "application/vnd.audiograph": { - "source": "iana", - "extensions": ["aep"] - }, - "application/vnd.autopackage": { - "source": "iana" - }, - "application/vnd.avalon+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.avistar+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.balsamiq.bmml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["bmml"] - }, - "application/vnd.balsamiq.bmpr": { - "source": "iana" - }, - "application/vnd.banana-accounting": { - "source": "iana" - }, - "application/vnd.bbf.usp.error": { - "source": "iana" - }, - "application/vnd.bbf.usp.msg": { - "source": "iana" - }, - "application/vnd.bbf.usp.msg+json": { - "source": "iana", 
- "compressible": true - }, - "application/vnd.bekitzur-stech+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.bint.med-content": { - "source": "iana" - }, - "application/vnd.biopax.rdf+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.blink-idb-value-wrapper": { - "source": "iana" - }, - "application/vnd.blueice.multipass": { - "source": "iana", - "extensions": ["mpm"] - }, - "application/vnd.bluetooth.ep.oob": { - "source": "iana" - }, - "application/vnd.bluetooth.le.oob": { - "source": "iana" - }, - "application/vnd.bmi": { - "source": "iana", - "extensions": ["bmi"] - }, - "application/vnd.bpf": { - "source": "iana" - }, - "application/vnd.bpf3": { - "source": "iana" - }, - "application/vnd.businessobjects": { - "source": "iana", - "extensions": ["rep"] - }, - "application/vnd.byu.uapi+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.cab-jscript": { - "source": "iana" - }, - "application/vnd.canon-cpdl": { - "source": "iana" - }, - "application/vnd.canon-lips": { - "source": "iana" - }, - "application/vnd.capasystems-pg+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.cendio.thinlinc.clientconf": { - "source": "iana" - }, - "application/vnd.century-systems.tcp_stream": { - "source": "iana" - }, - "application/vnd.chemdraw+xml": { - "source": "iana", - "compressible": true, - "extensions": ["cdxml"] - }, - "application/vnd.chess-pgn": { - "source": "iana" - }, - "application/vnd.chipnuts.karaoke-mmd": { - "source": "iana", - "extensions": ["mmd"] - }, - "application/vnd.ciedi": { - "source": "iana" - }, - "application/vnd.cinderella": { - "source": "iana", - "extensions": ["cdy"] - }, - "application/vnd.cirpack.isdn-ext": { - "source": "iana" - }, - "application/vnd.citationstyles.style+xml": { - "source": "iana", - "compressible": true, - "extensions": ["csl"] - }, - "application/vnd.claymore": { - "source": "iana", - "extensions": ["cla"] - }, - "application/vnd.cloanto.rp9": { - "source": "iana", - "extensions": ["rp9"] - }, - "application/vnd.clonk.c4group": { - "source": "iana", - "extensions": ["c4g","c4d","c4f","c4p","c4u"] - }, - "application/vnd.cluetrust.cartomobile-config": { - "source": "iana", - "extensions": ["c11amc"] - }, - "application/vnd.cluetrust.cartomobile-config-pkg": { - "source": "iana", - "extensions": ["c11amz"] - }, - "application/vnd.coffeescript": { - "source": "iana" - }, - "application/vnd.collabio.xodocuments.document": { - "source": "iana" - }, - "application/vnd.collabio.xodocuments.document-template": { - "source": "iana" - }, - "application/vnd.collabio.xodocuments.presentation": { - "source": "iana" - }, - "application/vnd.collabio.xodocuments.presentation-template": { - "source": "iana" - }, - "application/vnd.collabio.xodocuments.spreadsheet": { - "source": "iana" - }, - "application/vnd.collabio.xodocuments.spreadsheet-template": { - "source": "iana" - }, - "application/vnd.collection+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.collection.doc+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.collection.next+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.comicbook+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.comicbook-rar": { - "source": "iana" - }, - "application/vnd.commerce-battelle": { - "source": "iana" - }, - "application/vnd.commonspace": { - "source": "iana", - "extensions": ["csp"] - }, - 
"application/vnd.contact.cmsg": { - "source": "iana", - "extensions": ["cdbcmsg"] - }, - "application/vnd.coreos.ignition+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.cosmocaller": { - "source": "iana", - "extensions": ["cmc"] - }, - "application/vnd.crick.clicker": { - "source": "iana", - "extensions": ["clkx"] - }, - "application/vnd.crick.clicker.keyboard": { - "source": "iana", - "extensions": ["clkk"] - }, - "application/vnd.crick.clicker.palette": { - "source": "iana", - "extensions": ["clkp"] - }, - "application/vnd.crick.clicker.template": { - "source": "iana", - "extensions": ["clkt"] - }, - "application/vnd.crick.clicker.wordbank": { - "source": "iana", - "extensions": ["clkw"] - }, - "application/vnd.criticaltools.wbs+xml": { - "source": "iana", - "compressible": true, - "extensions": ["wbs"] - }, - "application/vnd.cryptii.pipe+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.crypto-shade-file": { - "source": "iana" - }, - "application/vnd.ctc-posml": { - "source": "iana", - "extensions": ["pml"] - }, - "application/vnd.ctct.ws+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.cups-pdf": { - "source": "iana" - }, - "application/vnd.cups-postscript": { - "source": "iana" - }, - "application/vnd.cups-ppd": { - "source": "iana", - "extensions": ["ppd"] - }, - "application/vnd.cups-raster": { - "source": "iana" - }, - "application/vnd.cups-raw": { - "source": "iana" - }, - "application/vnd.curl": { - "source": "iana" - }, - "application/vnd.curl.car": { - "source": "apache", - "extensions": ["car"] - }, - "application/vnd.curl.pcurl": { - "source": "apache", - "extensions": ["pcurl"] - }, - "application/vnd.cyan.dean.root+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.cybank": { - "source": "iana" - }, - "application/vnd.d2l.coursepackage1p0+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.dart": { - "source": "iana", - "compressible": true, - "extensions": ["dart"] - }, - "application/vnd.data-vision.rdz": { - "source": "iana", - "extensions": ["rdz"] - }, - "application/vnd.datapackage+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.dataresource+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.debian.binary-package": { - "source": "iana" - }, - "application/vnd.dece.data": { - "source": "iana", - "extensions": ["uvf","uvvf","uvd","uvvd"] - }, - "application/vnd.dece.ttml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["uvt","uvvt"] - }, - "application/vnd.dece.unspecified": { - "source": "iana", - "extensions": ["uvx","uvvx"] - }, - "application/vnd.dece.zip": { - "source": "iana", - "extensions": ["uvz","uvvz"] - }, - "application/vnd.denovo.fcselayout-link": { - "source": "iana", - "extensions": ["fe_launch"] - }, - "application/vnd.desmume.movie": { - "source": "iana" - }, - "application/vnd.dir-bi.plate-dl-nosuffix": { - "source": "iana" - }, - "application/vnd.dm.delegation+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dna": { - "source": "iana", - "extensions": ["dna"] - }, - "application/vnd.document+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.dolby.mlp": { - "source": "apache", - "extensions": ["mlp"] - }, - "application/vnd.dolby.mobile.1": { - "source": "iana" - }, - "application/vnd.dolby.mobile.2": { - "source": "iana" - }, - "application/vnd.doremir.scorecloud-binary-document": { - "source": 
"iana" - }, - "application/vnd.dpgraph": { - "source": "iana", - "extensions": ["dpg"] - }, - "application/vnd.dreamfactory": { - "source": "iana", - "extensions": ["dfac"] - }, - "application/vnd.drive+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.ds-keypoint": { - "source": "apache", - "extensions": ["kpxx"] - }, - "application/vnd.dtg.local": { - "source": "iana" - }, - "application/vnd.dtg.local.flash": { - "source": "iana" - }, - "application/vnd.dtg.local.html": { - "source": "iana" - }, - "application/vnd.dvb.ait": { - "source": "iana", - "extensions": ["ait"] - }, - "application/vnd.dvb.dvbj": { - "source": "iana" - }, - "application/vnd.dvb.esgcontainer": { - "source": "iana" - }, - "application/vnd.dvb.ipdcdftnotifaccess": { - "source": "iana" - }, - "application/vnd.dvb.ipdcesgaccess": { - "source": "iana" - }, - "application/vnd.dvb.ipdcesgaccess2": { - "source": "iana" - }, - "application/vnd.dvb.ipdcesgpdd": { - "source": "iana" - }, - "application/vnd.dvb.ipdcroaming": { - "source": "iana" - }, - "application/vnd.dvb.iptv.alfec-base": { - "source": "iana" - }, - "application/vnd.dvb.iptv.alfec-enhancement": { - "source": "iana" - }, - "application/vnd.dvb.notif-aggregate-root+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dvb.notif-container+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dvb.notif-generic+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dvb.notif-ia-msglist+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dvb.notif-ia-registration-request+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dvb.notif-ia-registration-response+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dvb.notif-init+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.dvb.pfr": { - "source": "iana" - }, - "application/vnd.dvb.service": { - "source": "iana", - "extensions": ["svc"] - }, - "application/vnd.dxr": { - "source": "iana" - }, - "application/vnd.dynageo": { - "source": "iana", - "extensions": ["geo"] - }, - "application/vnd.dzr": { - "source": "iana" - }, - "application/vnd.easykaraoke.cdgdownload": { - "source": "iana" - }, - "application/vnd.ecdis-update": { - "source": "iana" - }, - "application/vnd.ecip.rlp": { - "source": "iana" - }, - "application/vnd.ecowin.chart": { - "source": "iana", - "extensions": ["mag"] - }, - "application/vnd.ecowin.filerequest": { - "source": "iana" - }, - "application/vnd.ecowin.fileupdate": { - "source": "iana" - }, - "application/vnd.ecowin.series": { - "source": "iana" - }, - "application/vnd.ecowin.seriesrequest": { - "source": "iana" - }, - "application/vnd.ecowin.seriesupdate": { - "source": "iana" - }, - "application/vnd.efi.img": { - "source": "iana" - }, - "application/vnd.efi.iso": { - "source": "iana" - }, - "application/vnd.emclient.accessrequest+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.enliven": { - "source": "iana", - "extensions": ["nml"] - }, - "application/vnd.enphase.envoy": { - "source": "iana" - }, - "application/vnd.eprints.data+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.epson.esf": { - "source": "iana", - "extensions": ["esf"] - }, - "application/vnd.epson.msf": { - "source": "iana", - "extensions": ["msf"] - }, - "application/vnd.epson.quickanime": { - "source": "iana", - "extensions": ["qam"] - }, - "application/vnd.epson.salt": { - "source": "iana", - 
"extensions": ["slt"] - }, - "application/vnd.epson.ssf": { - "source": "iana", - "extensions": ["ssf"] - }, - "application/vnd.ericsson.quickcall": { - "source": "iana" - }, - "application/vnd.espass-espass+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.eszigno3+xml": { - "source": "iana", - "compressible": true, - "extensions": ["es3","et3"] - }, - "application/vnd.etsi.aoc+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.asic-e+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.etsi.asic-s+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.etsi.cug+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvcommand+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvdiscovery+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvprofile+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvsad-bc+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvsad-cod+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvsad-npvr+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvservice+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvsync+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.iptvueprofile+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.mcid+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.mheg5": { - "source": "iana" - }, - "application/vnd.etsi.overload-control-policy-dataset+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.pstn+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.sci+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.simservs+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.timestamp-token": { - "source": "iana" - }, - "application/vnd.etsi.tsl+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.etsi.tsl.der": { - "source": "iana" - }, - "application/vnd.eudora.data": { - "source": "iana" - }, - "application/vnd.evolv.ecig.profile": { - "source": "iana" - }, - "application/vnd.evolv.ecig.settings": { - "source": "iana" - }, - "application/vnd.evolv.ecig.theme": { - "source": "iana" - }, - "application/vnd.exstream-empower+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.exstream-package": { - "source": "iana" - }, - "application/vnd.ezpix-album": { - "source": "iana", - "extensions": ["ez2"] - }, - "application/vnd.ezpix-package": { - "source": "iana", - "extensions": ["ez3"] - }, - "application/vnd.f-secure.mobile": { - "source": "iana" - }, - "application/vnd.fastcopy-disk-image": { - "source": "iana" - }, - "application/vnd.fdf": { - "source": "iana", - "extensions": ["fdf"] - }, - "application/vnd.fdsn.mseed": { - "source": "iana", - "extensions": ["mseed"] - }, - "application/vnd.fdsn.seed": { - "source": "iana", - "extensions": ["seed","dataless"] - }, - "application/vnd.ffsns": { - "source": "iana" - }, - "application/vnd.ficlab.flb+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.filmit.zfc": { - "source": "iana" - }, - "application/vnd.fints": { - "source": "iana" - }, - "application/vnd.firemonkeys.cloudcell": { - "source": 
"iana" - }, - "application/vnd.flographit": { - "source": "iana", - "extensions": ["gph"] - }, - "application/vnd.fluxtime.clip": { - "source": "iana", - "extensions": ["ftc"] - }, - "application/vnd.font-fontforge-sfd": { - "source": "iana" - }, - "application/vnd.framemaker": { - "source": "iana", - "extensions": ["fm","frame","maker","book"] - }, - "application/vnd.frogans.fnc": { - "source": "iana", - "extensions": ["fnc"] - }, - "application/vnd.frogans.ltf": { - "source": "iana", - "extensions": ["ltf"] - }, - "application/vnd.fsc.weblaunch": { - "source": "iana", - "extensions": ["fsc"] - }, - "application/vnd.fujitsu.oasys": { - "source": "iana", - "extensions": ["oas"] - }, - "application/vnd.fujitsu.oasys2": { - "source": "iana", - "extensions": ["oa2"] - }, - "application/vnd.fujitsu.oasys3": { - "source": "iana", - "extensions": ["oa3"] - }, - "application/vnd.fujitsu.oasysgp": { - "source": "iana", - "extensions": ["fg5"] - }, - "application/vnd.fujitsu.oasysprs": { - "source": "iana", - "extensions": ["bh2"] - }, - "application/vnd.fujixerox.art-ex": { - "source": "iana" - }, - "application/vnd.fujixerox.art4": { - "source": "iana" - }, - "application/vnd.fujixerox.ddd": { - "source": "iana", - "extensions": ["ddd"] - }, - "application/vnd.fujixerox.docuworks": { - "source": "iana", - "extensions": ["xdw"] - }, - "application/vnd.fujixerox.docuworks.binder": { - "source": "iana", - "extensions": ["xbd"] - }, - "application/vnd.fujixerox.docuworks.container": { - "source": "iana" - }, - "application/vnd.fujixerox.hbpl": { - "source": "iana" - }, - "application/vnd.fut-misnet": { - "source": "iana" - }, - "application/vnd.futoin+cbor": { - "source": "iana" - }, - "application/vnd.futoin+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.fuzzysheet": { - "source": "iana", - "extensions": ["fzs"] - }, - "application/vnd.genomatix.tuxedo": { - "source": "iana", - "extensions": ["txd"] - }, - "application/vnd.gentics.grd+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.geo+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.geocube+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.geogebra.file": { - "source": "iana", - "extensions": ["ggb"] - }, - "application/vnd.geogebra.tool": { - "source": "iana", - "extensions": ["ggt"] - }, - "application/vnd.geometry-explorer": { - "source": "iana", - "extensions": ["gex","gre"] - }, - "application/vnd.geonext": { - "source": "iana", - "extensions": ["gxt"] - }, - "application/vnd.geoplan": { - "source": "iana", - "extensions": ["g2w"] - }, - "application/vnd.geospace": { - "source": "iana", - "extensions": ["g3w"] - }, - "application/vnd.gerber": { - "source": "iana" - }, - "application/vnd.globalplatform.card-content-mgt": { - "source": "iana" - }, - "application/vnd.globalplatform.card-content-mgt-response": { - "source": "iana" - }, - "application/vnd.gmx": { - "source": "iana", - "extensions": ["gmx"] - }, - "application/vnd.google-apps.document": { - "compressible": false, - "extensions": ["gdoc"] - }, - "application/vnd.google-apps.presentation": { - "compressible": false, - "extensions": ["gslides"] - }, - "application/vnd.google-apps.spreadsheet": { - "compressible": false, - "extensions": ["gsheet"] - }, - "application/vnd.google-earth.kml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["kml"] - }, - "application/vnd.google-earth.kmz": { - "source": "iana", - "compressible": false, - "extensions": ["kmz"] - }, 
- "application/vnd.gov.sk.e-form+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.gov.sk.e-form+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.gov.sk.xmldatacontainer+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.grafeq": { - "source": "iana", - "extensions": ["gqf","gqs"] - }, - "application/vnd.gridmp": { - "source": "iana" - }, - "application/vnd.groove-account": { - "source": "iana", - "extensions": ["gac"] - }, - "application/vnd.groove-help": { - "source": "iana", - "extensions": ["ghf"] - }, - "application/vnd.groove-identity-message": { - "source": "iana", - "extensions": ["gim"] - }, - "application/vnd.groove-injector": { - "source": "iana", - "extensions": ["grv"] - }, - "application/vnd.groove-tool-message": { - "source": "iana", - "extensions": ["gtm"] - }, - "application/vnd.groove-tool-template": { - "source": "iana", - "extensions": ["tpl"] - }, - "application/vnd.groove-vcard": { - "source": "iana", - "extensions": ["vcg"] - }, - "application/vnd.hal+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.hal+xml": { - "source": "iana", - "compressible": true, - "extensions": ["hal"] - }, - "application/vnd.handheld-entertainment+xml": { - "source": "iana", - "compressible": true, - "extensions": ["zmm"] - }, - "application/vnd.hbci": { - "source": "iana", - "extensions": ["hbci"] - }, - "application/vnd.hc+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.hcl-bireports": { - "source": "iana" - }, - "application/vnd.hdt": { - "source": "iana" - }, - "application/vnd.heroku+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.hhe.lesson-player": { - "source": "iana", - "extensions": ["les"] - }, - "application/vnd.hp-hpgl": { - "source": "iana", - "extensions": ["hpgl"] - }, - "application/vnd.hp-hpid": { - "source": "iana", - "extensions": ["hpid"] - }, - "application/vnd.hp-hps": { - "source": "iana", - "extensions": ["hps"] - }, - "application/vnd.hp-jlyt": { - "source": "iana", - "extensions": ["jlt"] - }, - "application/vnd.hp-pcl": { - "source": "iana", - "extensions": ["pcl"] - }, - "application/vnd.hp-pclxl": { - "source": "iana", - "extensions": ["pclxl"] - }, - "application/vnd.httphone": { - "source": "iana" - }, - "application/vnd.hydrostatix.sof-data": { - "source": "iana", - "extensions": ["sfd-hdstx"] - }, - "application/vnd.hyper+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.hyper-item+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.hyperdrive+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.hzn-3d-crossword": { - "source": "iana" - }, - "application/vnd.ibm.afplinedata": { - "source": "iana" - }, - "application/vnd.ibm.electronic-media": { - "source": "iana" - }, - "application/vnd.ibm.minipay": { - "source": "iana", - "extensions": ["mpy"] - }, - "application/vnd.ibm.modcap": { - "source": "iana", - "extensions": ["afp","listafp","list3820"] - }, - "application/vnd.ibm.rights-management": { - "source": "iana", - "extensions": ["irm"] - }, - "application/vnd.ibm.secure-container": { - "source": "iana", - "extensions": ["sc"] - }, - "application/vnd.iccprofile": { - "source": "iana", - "extensions": ["icc","icm"] - }, - "application/vnd.ieee.1905": { - "source": "iana" - }, - "application/vnd.igloader": { - "source": "iana", - "extensions": ["igl"] - }, - "application/vnd.imagemeter.folder+zip": { - "source": "iana", - 
"compressible": false - }, - "application/vnd.imagemeter.image+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.immervision-ivp": { - "source": "iana", - "extensions": ["ivp"] - }, - "application/vnd.immervision-ivu": { - "source": "iana", - "extensions": ["ivu"] - }, - "application/vnd.ims.imsccv1p1": { - "source": "iana" - }, - "application/vnd.ims.imsccv1p2": { - "source": "iana" - }, - "application/vnd.ims.imsccv1p3": { - "source": "iana" - }, - "application/vnd.ims.lis.v2.result+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.ims.lti.v2.toolconsumerprofile+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.ims.lti.v2.toolproxy+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.ims.lti.v2.toolproxy.id+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.ims.lti.v2.toolsettings+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.ims.lti.v2.toolsettings.simple+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.informedcontrol.rms+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.informix-visionary": { - "source": "iana" - }, - "application/vnd.infotech.project": { - "source": "iana" - }, - "application/vnd.infotech.project+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.innopath.wamp.notification": { - "source": "iana" - }, - "application/vnd.insors.igm": { - "source": "iana", - "extensions": ["igm"] - }, - "application/vnd.intercon.formnet": { - "source": "iana", - "extensions": ["xpw","xpx"] - }, - "application/vnd.intergeo": { - "source": "iana", - "extensions": ["i2g"] - }, - "application/vnd.intertrust.digibox": { - "source": "iana" - }, - "application/vnd.intertrust.nncp": { - "source": "iana" - }, - "application/vnd.intu.qbo": { - "source": "iana", - "extensions": ["qbo"] - }, - "application/vnd.intu.qfx": { - "source": "iana", - "extensions": ["qfx"] - }, - "application/vnd.iptc.g2.catalogitem+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.iptc.g2.conceptitem+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.iptc.g2.knowledgeitem+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.iptc.g2.newsitem+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.iptc.g2.newsmessage+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.iptc.g2.packageitem+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.iptc.g2.planningitem+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.ipunplugged.rcprofile": { - "source": "iana", - "extensions": ["rcprofile"] - }, - "application/vnd.irepository.package+xml": { - "source": "iana", - "compressible": true, - "extensions": ["irp"] - }, - "application/vnd.is-xpr": { - "source": "iana", - "extensions": ["xpr"] - }, - "application/vnd.isac.fcs": { - "source": "iana", - "extensions": ["fcs"] - }, - "application/vnd.iso11783-10+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.jam": { - "source": "iana", - "extensions": ["jam"] - }, - "application/vnd.japannet-directory-service": { - "source": "iana" - }, - "application/vnd.japannet-jpnstore-wakeup": { - "source": "iana" - }, - "application/vnd.japannet-payment-wakeup": { - "source": "iana" - }, - "application/vnd.japannet-registration": { - "source": "iana" - }, - 
"application/vnd.japannet-registration-wakeup": { - "source": "iana" - }, - "application/vnd.japannet-setstore-wakeup": { - "source": "iana" - }, - "application/vnd.japannet-verification": { - "source": "iana" - }, - "application/vnd.japannet-verification-wakeup": { - "source": "iana" - }, - "application/vnd.jcp.javame.midlet-rms": { - "source": "iana", - "extensions": ["rms"] - }, - "application/vnd.jisp": { - "source": "iana", - "extensions": ["jisp"] - }, - "application/vnd.joost.joda-archive": { - "source": "iana", - "extensions": ["joda"] - }, - "application/vnd.jsk.isdn-ngn": { - "source": "iana" - }, - "application/vnd.kahootz": { - "source": "iana", - "extensions": ["ktz","ktr"] - }, - "application/vnd.kde.karbon": { - "source": "iana", - "extensions": ["karbon"] - }, - "application/vnd.kde.kchart": { - "source": "iana", - "extensions": ["chrt"] - }, - "application/vnd.kde.kformula": { - "source": "iana", - "extensions": ["kfo"] - }, - "application/vnd.kde.kivio": { - "source": "iana", - "extensions": ["flw"] - }, - "application/vnd.kde.kontour": { - "source": "iana", - "extensions": ["kon"] - }, - "application/vnd.kde.kpresenter": { - "source": "iana", - "extensions": ["kpr","kpt"] - }, - "application/vnd.kde.kspread": { - "source": "iana", - "extensions": ["ksp"] - }, - "application/vnd.kde.kword": { - "source": "iana", - "extensions": ["kwd","kwt"] - }, - "application/vnd.kenameaapp": { - "source": "iana", - "extensions": ["htke"] - }, - "application/vnd.kidspiration": { - "source": "iana", - "extensions": ["kia"] - }, - "application/vnd.kinar": { - "source": "iana", - "extensions": ["kne","knp"] - }, - "application/vnd.koan": { - "source": "iana", - "extensions": ["skp","skd","skt","skm"] - }, - "application/vnd.kodak-descriptor": { - "source": "iana", - "extensions": ["sse"] - }, - "application/vnd.las": { - "source": "iana" - }, - "application/vnd.las.las+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.las.las+xml": { - "source": "iana", - "compressible": true, - "extensions": ["lasxml"] - }, - "application/vnd.laszip": { - "source": "iana" - }, - "application/vnd.leap+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.liberty-request+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.llamagraphics.life-balance.desktop": { - "source": "iana", - "extensions": ["lbd"] - }, - "application/vnd.llamagraphics.life-balance.exchange+xml": { - "source": "iana", - "compressible": true, - "extensions": ["lbe"] - }, - "application/vnd.logipipe.circuit+zip": { - "source": "iana", - "compressible": false - }, - "application/vnd.loom": { - "source": "iana" - }, - "application/vnd.lotus-1-2-3": { - "source": "iana", - "extensions": ["123"] - }, - "application/vnd.lotus-approach": { - "source": "iana", - "extensions": ["apr"] - }, - "application/vnd.lotus-freelance": { - "source": "iana", - "extensions": ["pre"] - }, - "application/vnd.lotus-notes": { - "source": "iana", - "extensions": ["nsf"] - }, - "application/vnd.lotus-organizer": { - "source": "iana", - "extensions": ["org"] - }, - "application/vnd.lotus-screencam": { - "source": "iana", - "extensions": ["scm"] - }, - "application/vnd.lotus-wordpro": { - "source": "iana", - "extensions": ["lwp"] - }, - "application/vnd.macports.portpkg": { - "source": "iana", - "extensions": ["portpkg"] - }, - "application/vnd.mapbox-vector-tile": { - "source": "iana" - }, - "application/vnd.marlin.drm.actiontoken+xml": { - "source": "iana", - "compressible": true - }, - 
"application/vnd.marlin.drm.conftoken+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.marlin.drm.license+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.marlin.drm.mdcf": { - "source": "iana" - }, - "application/vnd.mason+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.maxmind.maxmind-db": { - "source": "iana" - }, - "application/vnd.mcd": { - "source": "iana", - "extensions": ["mcd"] - }, - "application/vnd.medcalcdata": { - "source": "iana", - "extensions": ["mc1"] - }, - "application/vnd.mediastation.cdkey": { - "source": "iana", - "extensions": ["cdkey"] - }, - "application/vnd.meridian-slingshot": { - "source": "iana" - }, - "application/vnd.mfer": { - "source": "iana", - "extensions": ["mwf"] - }, - "application/vnd.mfmp": { - "source": "iana", - "extensions": ["mfm"] - }, - "application/vnd.micro+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.micrografx.flo": { - "source": "iana", - "extensions": ["flo"] - }, - "application/vnd.micrografx.igx": { - "source": "iana", - "extensions": ["igx"] - }, - "application/vnd.microsoft.portable-executable": { - "source": "iana" - }, - "application/vnd.microsoft.windows.thumbnail-cache": { - "source": "iana" - }, - "application/vnd.miele+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.mif": { - "source": "iana", - "extensions": ["mif"] - }, - "application/vnd.minisoft-hp3000-save": { - "source": "iana" - }, - "application/vnd.mitsubishi.misty-guard.trustweb": { - "source": "iana" - }, - "application/vnd.mobius.daf": { - "source": "iana", - "extensions": ["daf"] - }, - "application/vnd.mobius.dis": { - "source": "iana", - "extensions": ["dis"] - }, - "application/vnd.mobius.mbk": { - "source": "iana", - "extensions": ["mbk"] - }, - "application/vnd.mobius.mqy": { - "source": "iana", - "extensions": ["mqy"] - }, - "application/vnd.mobius.msl": { - "source": "iana", - "extensions": ["msl"] - }, - "application/vnd.mobius.plc": { - "source": "iana", - "extensions": ["plc"] - }, - "application/vnd.mobius.txf": { - "source": "iana", - "extensions": ["txf"] - }, - "application/vnd.mophun.application": { - "source": "iana", - "extensions": ["mpn"] - }, - "application/vnd.mophun.certificate": { - "source": "iana", - "extensions": ["mpc"] - }, - "application/vnd.motorola.flexsuite": { - "source": "iana" - }, - "application/vnd.motorola.flexsuite.adsi": { - "source": "iana" - }, - "application/vnd.motorola.flexsuite.fis": { - "source": "iana" - }, - "application/vnd.motorola.flexsuite.gotap": { - "source": "iana" - }, - "application/vnd.motorola.flexsuite.kmr": { - "source": "iana" - }, - "application/vnd.motorola.flexsuite.ttc": { - "source": "iana" - }, - "application/vnd.motorola.flexsuite.wem": { - "source": "iana" - }, - "application/vnd.motorola.iprm": { - "source": "iana" - }, - "application/vnd.mozilla.xul+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xul"] - }, - "application/vnd.ms-3mfdocument": { - "source": "iana" - }, - "application/vnd.ms-artgalry": { - "source": "iana", - "extensions": ["cil"] - }, - "application/vnd.ms-asf": { - "source": "iana" - }, - "application/vnd.ms-cab-compressed": { - "source": "iana", - "extensions": ["cab"] - }, - "application/vnd.ms-color.iccprofile": { - "source": "apache" - }, - "application/vnd.ms-excel": { - "source": "iana", - "compressible": false, - "extensions": ["xls","xlm","xla","xlc","xlt","xlw"] - }, - 
"application/vnd.ms-excel.addin.macroenabled.12": { - "source": "iana", - "extensions": ["xlam"] - }, - "application/vnd.ms-excel.sheet.binary.macroenabled.12": { - "source": "iana", - "extensions": ["xlsb"] - }, - "application/vnd.ms-excel.sheet.macroenabled.12": { - "source": "iana", - "extensions": ["xlsm"] - }, - "application/vnd.ms-excel.template.macroenabled.12": { - "source": "iana", - "extensions": ["xltm"] - }, - "application/vnd.ms-fontobject": { - "source": "iana", - "compressible": true, - "extensions": ["eot"] - }, - "application/vnd.ms-htmlhelp": { - "source": "iana", - "extensions": ["chm"] - }, - "application/vnd.ms-ims": { - "source": "iana", - "extensions": ["ims"] - }, - "application/vnd.ms-lrm": { - "source": "iana", - "extensions": ["lrm"] - }, - "application/vnd.ms-office.activex+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.ms-officetheme": { - "source": "iana", - "extensions": ["thmx"] - }, - "application/vnd.ms-opentype": { - "source": "apache", - "compressible": true - }, - "application/vnd.ms-outlook": { - "compressible": false, - "extensions": ["msg"] - }, - "application/vnd.ms-package.obfuscated-opentype": { - "source": "apache" - }, - "application/vnd.ms-pki.seccat": { - "source": "apache", - "extensions": ["cat"] - }, - "application/vnd.ms-pki.stl": { - "source": "apache", - "extensions": ["stl"] - }, - "application/vnd.ms-playready.initiator+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.ms-powerpoint": { - "source": "iana", - "compressible": false, - "extensions": ["ppt","pps","pot"] - }, - "application/vnd.ms-powerpoint.addin.macroenabled.12": { - "source": "iana", - "extensions": ["ppam"] - }, - "application/vnd.ms-powerpoint.presentation.macroenabled.12": { - "source": "iana", - "extensions": ["pptm"] - }, - "application/vnd.ms-powerpoint.slide.macroenabled.12": { - "source": "iana", - "extensions": ["sldm"] - }, - "application/vnd.ms-powerpoint.slideshow.macroenabled.12": { - "source": "iana", - "extensions": ["ppsm"] - }, - "application/vnd.ms-powerpoint.template.macroenabled.12": { - "source": "iana", - "extensions": ["potm"] - }, - "application/vnd.ms-printdevicecapabilities+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.ms-printing.printticket+xml": { - "source": "apache", - "compressible": true - }, - "application/vnd.ms-printschematicket+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.ms-project": { - "source": "iana", - "extensions": ["mpp","mpt"] - }, - "application/vnd.ms-tnef": { - "source": "iana" - }, - "application/vnd.ms-windows.devicepairing": { - "source": "iana" - }, - "application/vnd.ms-windows.nwprinting.oob": { - "source": "iana" - }, - "application/vnd.ms-windows.printerpairing": { - "source": "iana" - }, - "application/vnd.ms-windows.wsd.oob": { - "source": "iana" - }, - "application/vnd.ms-wmdrm.lic-chlg-req": { - "source": "iana" - }, - "application/vnd.ms-wmdrm.lic-resp": { - "source": "iana" - }, - "application/vnd.ms-wmdrm.meter-chlg-req": { - "source": "iana" - }, - "application/vnd.ms-wmdrm.meter-resp": { - "source": "iana" - }, - "application/vnd.ms-word.document.macroenabled.12": { - "source": "iana", - "extensions": ["docm"] - }, - "application/vnd.ms-word.template.macroenabled.12": { - "source": "iana", - "extensions": ["dotm"] - }, - "application/vnd.ms-works": { - "source": "iana", - "extensions": ["wps","wks","wcm","wdb"] - }, - "application/vnd.ms-wpl": { - "source": "iana", - "extensions": ["wpl"] - }, - 
"application/vnd.ms-xpsdocument": { - "source": "iana", - "compressible": false, - "extensions": ["xps"] - }, - "application/vnd.msa-disk-image": { - "source": "iana" - }, - "application/vnd.mseq": { - "source": "iana", - "extensions": ["mseq"] - }, - "application/vnd.msign": { - "source": "iana" - }, - "application/vnd.multiad.creator": { - "source": "iana" - }, - "application/vnd.multiad.creator.cif": { - "source": "iana" - }, - "application/vnd.music-niff": { - "source": "iana" - }, - "application/vnd.musician": { - "source": "iana", - "extensions": ["mus"] - }, - "application/vnd.muvee.style": { - "source": "iana", - "extensions": ["msty"] - }, - "application/vnd.mynfc": { - "source": "iana", - "extensions": ["taglet"] - }, - "application/vnd.ncd.control": { - "source": "iana" - }, - "application/vnd.ncd.reference": { - "source": "iana" - }, - "application/vnd.nearst.inv+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.nervana": { - "source": "iana" - }, - "application/vnd.netfpx": { - "source": "iana" - }, - "application/vnd.neurolanguage.nlu": { - "source": "iana", - "extensions": ["nlu"] - }, - "application/vnd.nimn": { - "source": "iana" - }, - "application/vnd.nintendo.nitro.rom": { - "source": "iana" - }, - "application/vnd.nintendo.snes.rom": { - "source": "iana" - }, - "application/vnd.nitf": { - "source": "iana", - "extensions": ["ntf","nitf"] - }, - "application/vnd.noblenet-directory": { - "source": "iana", - "extensions": ["nnd"] - }, - "application/vnd.noblenet-sealer": { - "source": "iana", - "extensions": ["nns"] - }, - "application/vnd.noblenet-web": { - "source": "iana", - "extensions": ["nnw"] - }, - "application/vnd.nokia.catalogs": { - "source": "iana" - }, - "application/vnd.nokia.conml+wbxml": { - "source": "iana" - }, - "application/vnd.nokia.conml+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.nokia.iptv.config+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.nokia.isds-radio-presets": { - "source": "iana" - }, - "application/vnd.nokia.landmark+wbxml": { - "source": "iana" - }, - "application/vnd.nokia.landmark+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.nokia.landmarkcollection+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.nokia.n-gage.ac+xml": { - "source": "iana", - "compressible": true, - "extensions": ["ac"] - }, - "application/vnd.nokia.n-gage.data": { - "source": "iana", - "extensions": ["ngdat"] - }, - "application/vnd.nokia.n-gage.symbian.install": { - "source": "iana", - "extensions": ["n-gage"] - }, - "application/vnd.nokia.ncd": { - "source": "iana" - }, - "application/vnd.nokia.pcd+wbxml": { - "source": "iana" - }, - "application/vnd.nokia.pcd+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.nokia.radio-preset": { - "source": "iana", - "extensions": ["rpst"] - }, - "application/vnd.nokia.radio-presets": { - "source": "iana", - "extensions": ["rpss"] - }, - "application/vnd.novadigm.edm": { - "source": "iana", - "extensions": ["edm"] - }, - "application/vnd.novadigm.edx": { - "source": "iana", - "extensions": ["edx"] - }, - "application/vnd.novadigm.ext": { - "source": "iana", - "extensions": ["ext"] - }, - "application/vnd.ntt-local.content-share": { - "source": "iana" - }, - "application/vnd.ntt-local.file-transfer": { - "source": "iana" - }, - "application/vnd.ntt-local.ogw_remote-access": { - "source": "iana" - }, - "application/vnd.ntt-local.sip-ta_remote": { - "source": "iana" - 
}, - "application/vnd.ntt-local.sip-ta_tcp_stream": { - "source": "iana" - }, - "application/vnd.oasis.opendocument.chart": { - "source": "iana", - "extensions": ["odc"] - }, - "application/vnd.oasis.opendocument.chart-template": { - "source": "iana", - "extensions": ["otc"] - }, - "application/vnd.oasis.opendocument.database": { - "source": "iana", - "extensions": ["odb"] - }, - "application/vnd.oasis.opendocument.formula": { - "source": "iana", - "extensions": ["odf"] - }, - "application/vnd.oasis.opendocument.formula-template": { - "source": "iana", - "extensions": ["odft"] - }, - "application/vnd.oasis.opendocument.graphics": { - "source": "iana", - "compressible": false, - "extensions": ["odg"] - }, - "application/vnd.oasis.opendocument.graphics-template": { - "source": "iana", - "extensions": ["otg"] - }, - "application/vnd.oasis.opendocument.image": { - "source": "iana", - "extensions": ["odi"] - }, - "application/vnd.oasis.opendocument.image-template": { - "source": "iana", - "extensions": ["oti"] - }, - "application/vnd.oasis.opendocument.presentation": { - "source": "iana", - "compressible": false, - "extensions": ["odp"] - }, - "application/vnd.oasis.opendocument.presentation-template": { - "source": "iana", - "extensions": ["otp"] - }, - "application/vnd.oasis.opendocument.spreadsheet": { - "source": "iana", - "compressible": false, - "extensions": ["ods"] - }, - "application/vnd.oasis.opendocument.spreadsheet-template": { - "source": "iana", - "extensions": ["ots"] - }, - "application/vnd.oasis.opendocument.text": { - "source": "iana", - "compressible": false, - "extensions": ["odt"] - }, - "application/vnd.oasis.opendocument.text-master": { - "source": "iana", - "extensions": ["odm"] - }, - "application/vnd.oasis.opendocument.text-template": { - "source": "iana", - "extensions": ["ott"] - }, - "application/vnd.oasis.opendocument.text-web": { - "source": "iana", - "extensions": ["oth"] - }, - "application/vnd.obn": { - "source": "iana" - }, - "application/vnd.ocf+cbor": { - "source": "iana" - }, - "application/vnd.oftn.l10n+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.contentaccessdownload+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.contentaccessstreaming+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.cspg-hexbinary": { - "source": "iana" - }, - "application/vnd.oipf.dae.svg+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.dae.xhtml+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.mippvcontrolmessage+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.pae.gem": { - "source": "iana" - }, - "application/vnd.oipf.spdiscovery+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.spdlist+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.ueprofile+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oipf.userprofile+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.olpc-sugar": { - "source": "iana", - "extensions": ["xo"] - }, - "application/vnd.oma-scws-config": { - "source": "iana" - }, - "application/vnd.oma-scws-http-request": { - "source": "iana" - }, - "application/vnd.oma-scws-http-response": { - "source": "iana" - }, - "application/vnd.oma.bcast.associated-procedure-parameter+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.bcast.drm-trigger+xml": { - 
"source": "iana", - "compressible": true - }, - "application/vnd.oma.bcast.imd+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.bcast.ltkm": { - "source": "iana" - }, - "application/vnd.oma.bcast.notification+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.bcast.provisioningtrigger": { - "source": "iana" - }, - "application/vnd.oma.bcast.sgboot": { - "source": "iana" - }, - "application/vnd.oma.bcast.sgdd+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.bcast.sgdu": { - "source": "iana" - }, - "application/vnd.oma.bcast.simple-symbol-container": { - "source": "iana" - }, - "application/vnd.oma.bcast.smartcard-trigger+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.bcast.sprov+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.bcast.stkm": { - "source": "iana" - }, - "application/vnd.oma.cab-address-book+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.cab-feature-handler+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.cab-pcc+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.cab-subs-invite+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.cab-user-prefs+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.dcd": { - "source": "iana" - }, - "application/vnd.oma.dcdc": { - "source": "iana" - }, - "application/vnd.oma.dd2+xml": { - "source": "iana", - "compressible": true, - "extensions": ["dd2"] - }, - "application/vnd.oma.drm.risd+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.group-usage-list+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.lwm2m+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.lwm2m+tlv": { - "source": "iana" - }, - "application/vnd.oma.pal+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.poc.detailed-progress-report+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.poc.final-report+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.poc.groups+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.poc.invocation-descriptor+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.poc.optimized-progress-report+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.push": { - "source": "iana" - }, - "application/vnd.oma.scidm.messages+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oma.xcap-directory+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.omads-email+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.omads-file+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.omads-folder+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.omaloc-supl-init": { - "source": "iana" - }, - "application/vnd.onepager": { - "source": "iana" - }, - "application/vnd.onepagertamp": { - "source": "iana" - }, - "application/vnd.onepagertamx": { - "source": "iana" - }, - "application/vnd.onepagertat": { - "source": "iana" - }, - "application/vnd.onepagertatp": { - "source": "iana" - }, - "application/vnd.onepagertatx": { - "source": "iana" - }, - "application/vnd.openblox.game+xml": { - "source": "iana", - "compressible": true, - 
"extensions": ["obgx"] - }, - "application/vnd.openblox.game-binary": { - "source": "iana" - }, - "application/vnd.openeye.oeb": { - "source": "iana" - }, - "application/vnd.openofficeorg.extension": { - "source": "apache", - "extensions": ["oxt"] - }, - "application/vnd.openstreetmap.data+xml": { - "source": "iana", - "compressible": true, - "extensions": ["osm"] - }, - "application/vnd.openxmlformats-officedocument.custom-properties+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.customxmlproperties+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.drawing+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.drawingml.chart+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.extended-properties+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.comments+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.presentation": { - "source": "iana", - "compressible": false, - "extensions": ["pptx"] - }, - "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.presprops+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.slide": { - "source": "iana", - "extensions": ["sldx"] - }, - "application/vnd.openxmlformats-officedocument.presentationml.slide+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.slideshow": { - "source": "iana", - "extensions": ["ppsx"] - }, - "application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml": { - "source": "iana", - "compressible": true - }, - 
"application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.tags+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.template": { - "source": "iana", - "extensions": ["potx"] - }, - "application/vnd.openxmlformats-officedocument.presentationml.template.main+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": { - "source": "iana", - "compressible": false, - "extensions": ["xlsx"] - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.template": { - "source": "iana", - "extensions": ["xltx"] - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml": { - "source": "iana", - "compressible": true - 
}, - "application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.theme+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.themeoverride+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.vmldrawing": { - "source": "iana" - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.document": { - "source": "iana", - "compressible": false, - "extensions": ["docx"] - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.template": { - "source": "iana", - "extensions": ["dotx"] - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-package.core-properties+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.openxmlformats-package.relationships+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.oracle.resource+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.orange.indata": { - "source": "iana" - }, - "application/vnd.osa.netdeploy": { - "source": "iana" - }, - "application/vnd.osgeo.mapguide.package": { - "source": "iana", - "extensions": ["mgp"] - }, - "application/vnd.osgi.bundle": { - "source": "iana" - }, - "application/vnd.osgi.dp": { - "source": "iana", - "extensions": ["dp"] - }, - "application/vnd.osgi.subsystem": { - "source": "iana", - "extensions": ["esa"] - }, - "application/vnd.otps.ct-kip+xml": { - "source": "iana", - 
"compressible": true - }, - "application/vnd.oxli.countgraph": { - "source": "iana" - }, - "application/vnd.pagerduty+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.palm": { - "source": "iana", - "extensions": ["pdb","pqa","oprc"] - }, - "application/vnd.panoply": { - "source": "iana" - }, - "application/vnd.paos.xml": { - "source": "iana" - }, - "application/vnd.patentdive": { - "source": "iana" - }, - "application/vnd.patientecommsdoc": { - "source": "iana" - }, - "application/vnd.pawaafile": { - "source": "iana", - "extensions": ["paw"] - }, - "application/vnd.pcos": { - "source": "iana" - }, - "application/vnd.pg.format": { - "source": "iana", - "extensions": ["str"] - }, - "application/vnd.pg.osasli": { - "source": "iana", - "extensions": ["ei6"] - }, - "application/vnd.piaccess.application-licence": { - "source": "iana" - }, - "application/vnd.picsel": { - "source": "iana", - "extensions": ["efif"] - }, - "application/vnd.pmi.widget": { - "source": "iana", - "extensions": ["wg"] - }, - "application/vnd.poc.group-advertisement+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.pocketlearn": { - "source": "iana", - "extensions": ["plf"] - }, - "application/vnd.powerbuilder6": { - "source": "iana", - "extensions": ["pbd"] - }, - "application/vnd.powerbuilder6-s": { - "source": "iana" - }, - "application/vnd.powerbuilder7": { - "source": "iana" - }, - "application/vnd.powerbuilder7-s": { - "source": "iana" - }, - "application/vnd.powerbuilder75": { - "source": "iana" - }, - "application/vnd.powerbuilder75-s": { - "source": "iana" - }, - "application/vnd.preminet": { - "source": "iana" - }, - "application/vnd.previewsystems.box": { - "source": "iana", - "extensions": ["box"] - }, - "application/vnd.proteus.magazine": { - "source": "iana", - "extensions": ["mgz"] - }, - "application/vnd.psfs": { - "source": "iana" - }, - "application/vnd.publishare-delta-tree": { - "source": "iana", - "extensions": ["qps"] - }, - "application/vnd.pvi.ptid1": { - "source": "iana", - "extensions": ["ptid"] - }, - "application/vnd.pwg-multiplexed": { - "source": "iana" - }, - "application/vnd.pwg-xhtml-print+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.qualcomm.brew-app-res": { - "source": "iana" - }, - "application/vnd.quarantainenet": { - "source": "iana" - }, - "application/vnd.quark.quarkxpress": { - "source": "iana", - "extensions": ["qxd","qxt","qwd","qwt","qxl","qxb"] - }, - "application/vnd.quobject-quoxdocument": { - "source": "iana" - }, - "application/vnd.radisys.moml+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-audit+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-audit-conf+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-audit-conn+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-audit-dialog+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-audit-stream+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-conf+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-dialog+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-dialog-base+xml": { - "source": "iana", - "compressible": true - }, - 
"application/vnd.radisys.msml-dialog-fax-detect+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-dialog-fax-sendrecv+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-dialog-group+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-dialog-speech+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.radisys.msml-dialog-transform+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.rainstor.data": { - "source": "iana" - }, - "application/vnd.rapid": { - "source": "iana" - }, - "application/vnd.rar": { - "source": "iana" - }, - "application/vnd.realvnc.bed": { - "source": "iana", - "extensions": ["bed"] - }, - "application/vnd.recordare.musicxml": { - "source": "iana", - "extensions": ["mxl"] - }, - "application/vnd.recordare.musicxml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["musicxml"] - }, - "application/vnd.renlearn.rlprint": { - "source": "iana" - }, - "application/vnd.restful+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.rig.cryptonote": { - "source": "iana", - "extensions": ["cryptonote"] - }, - "application/vnd.rim.cod": { - "source": "apache", - "extensions": ["cod"] - }, - "application/vnd.rn-realmedia": { - "source": "apache", - "extensions": ["rm"] - }, - "application/vnd.rn-realmedia-vbr": { - "source": "apache", - "extensions": ["rmvb"] - }, - "application/vnd.route66.link66+xml": { - "source": "iana", - "compressible": true, - "extensions": ["link66"] - }, - "application/vnd.rs-274x": { - "source": "iana" - }, - "application/vnd.ruckus.download": { - "source": "iana" - }, - "application/vnd.s3sms": { - "source": "iana" - }, - "application/vnd.sailingtracker.track": { - "source": "iana", - "extensions": ["st"] - }, - "application/vnd.sbm.cid": { - "source": "iana" - }, - "application/vnd.sbm.mid2": { - "source": "iana" - }, - "application/vnd.scribus": { - "source": "iana" - }, - "application/vnd.sealed.3df": { - "source": "iana" - }, - "application/vnd.sealed.csf": { - "source": "iana" - }, - "application/vnd.sealed.doc": { - "source": "iana" - }, - "application/vnd.sealed.eml": { - "source": "iana" - }, - "application/vnd.sealed.mht": { - "source": "iana" - }, - "application/vnd.sealed.net": { - "source": "iana" - }, - "application/vnd.sealed.ppt": { - "source": "iana" - }, - "application/vnd.sealed.tiff": { - "source": "iana" - }, - "application/vnd.sealed.xls": { - "source": "iana" - }, - "application/vnd.sealedmedia.softseal.html": { - "source": "iana" - }, - "application/vnd.sealedmedia.softseal.pdf": { - "source": "iana" - }, - "application/vnd.seemail": { - "source": "iana", - "extensions": ["see"] - }, - "application/vnd.sema": { - "source": "iana", - "extensions": ["sema"] - }, - "application/vnd.semd": { - "source": "iana", - "extensions": ["semd"] - }, - "application/vnd.semf": { - "source": "iana", - "extensions": ["semf"] - }, - "application/vnd.shade-save-file": { - "source": "iana" - }, - "application/vnd.shana.informed.formdata": { - "source": "iana", - "extensions": ["ifm"] - }, - "application/vnd.shana.informed.formtemplate": { - "source": "iana", - "extensions": ["itp"] - }, - "application/vnd.shana.informed.interchange": { - "source": "iana", - "extensions": ["iif"] - }, - "application/vnd.shana.informed.package": { - "source": "iana", - "extensions": ["ipk"] - }, - "application/vnd.shootproof+json": { - "source": "iana", - "compressible": true - }, 
- "application/vnd.shopkick+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.sigrok.session": { - "source": "iana" - }, - "application/vnd.simtech-mindmapper": { - "source": "iana", - "extensions": ["twd","twds"] - }, - "application/vnd.siren+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.smaf": { - "source": "iana", - "extensions": ["mmf"] - }, - "application/vnd.smart.notebook": { - "source": "iana" - }, - "application/vnd.smart.teacher": { - "source": "iana", - "extensions": ["teacher"] - }, - "application/vnd.software602.filler.form+xml": { - "source": "iana", - "compressible": true, - "extensions": ["fo"] - }, - "application/vnd.software602.filler.form-xml-zip": { - "source": "iana" - }, - "application/vnd.solent.sdkm+xml": { - "source": "iana", - "compressible": true, - "extensions": ["sdkm","sdkd"] - }, - "application/vnd.spotfire.dxp": { - "source": "iana", - "extensions": ["dxp"] - }, - "application/vnd.spotfire.sfs": { - "source": "iana", - "extensions": ["sfs"] - }, - "application/vnd.sqlite3": { - "source": "iana" - }, - "application/vnd.sss-cod": { - "source": "iana" - }, - "application/vnd.sss-dtf": { - "source": "iana" - }, - "application/vnd.sss-ntf": { - "source": "iana" - }, - "application/vnd.stardivision.calc": { - "source": "apache", - "extensions": ["sdc"] - }, - "application/vnd.stardivision.draw": { - "source": "apache", - "extensions": ["sda"] - }, - "application/vnd.stardivision.impress": { - "source": "apache", - "extensions": ["sdd"] - }, - "application/vnd.stardivision.math": { - "source": "apache", - "extensions": ["smf"] - }, - "application/vnd.stardivision.writer": { - "source": "apache", - "extensions": ["sdw","vor"] - }, - "application/vnd.stardivision.writer-global": { - "source": "apache", - "extensions": ["sgl"] - }, - "application/vnd.stepmania.package": { - "source": "iana", - "extensions": ["smzip"] - }, - "application/vnd.stepmania.stepchart": { - "source": "iana", - "extensions": ["sm"] - }, - "application/vnd.street-stream": { - "source": "iana" - }, - "application/vnd.sun.wadl+xml": { - "source": "iana", - "compressible": true, - "extensions": ["wadl"] - }, - "application/vnd.sun.xml.calc": { - "source": "apache", - "extensions": ["sxc"] - }, - "application/vnd.sun.xml.calc.template": { - "source": "apache", - "extensions": ["stc"] - }, - "application/vnd.sun.xml.draw": { - "source": "apache", - "extensions": ["sxd"] - }, - "application/vnd.sun.xml.draw.template": { - "source": "apache", - "extensions": ["std"] - }, - "application/vnd.sun.xml.impress": { - "source": "apache", - "extensions": ["sxi"] - }, - "application/vnd.sun.xml.impress.template": { - "source": "apache", - "extensions": ["sti"] - }, - "application/vnd.sun.xml.math": { - "source": "apache", - "extensions": ["sxm"] - }, - "application/vnd.sun.xml.writer": { - "source": "apache", - "extensions": ["sxw"] - }, - "application/vnd.sun.xml.writer.global": { - "source": "apache", - "extensions": ["sxg"] - }, - "application/vnd.sun.xml.writer.template": { - "source": "apache", - "extensions": ["stw"] - }, - "application/vnd.sus-calendar": { - "source": "iana", - "extensions": ["sus","susp"] - }, - "application/vnd.svd": { - "source": "iana", - "extensions": ["svd"] - }, - "application/vnd.swiftview-ics": { - "source": "iana" - }, - "application/vnd.symbian.install": { - "source": "apache", - "extensions": ["sis","sisx"] - }, - "application/vnd.syncml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xsm"] - }, - 
"application/vnd.syncml.dm+wbxml": { - "source": "iana", - "extensions": ["bdm"] - }, - "application/vnd.syncml.dm+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xdm"] - }, - "application/vnd.syncml.dm.notification": { - "source": "iana" - }, - "application/vnd.syncml.dmddf+wbxml": { - "source": "iana" - }, - "application/vnd.syncml.dmddf+xml": { - "source": "iana", - "compressible": true, - "extensions": ["ddf"] - }, - "application/vnd.syncml.dmtnds+wbxml": { - "source": "iana" - }, - "application/vnd.syncml.dmtnds+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.syncml.ds.notification": { - "source": "iana" - }, - "application/vnd.tableschema+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.tao.intent-module-archive": { - "source": "iana", - "extensions": ["tao"] - }, - "application/vnd.tcpdump.pcap": { - "source": "iana", - "extensions": ["pcap","cap","dmp"] - }, - "application/vnd.think-cell.ppttc+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.tmd.mediaflex.api+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.tml": { - "source": "iana" - }, - "application/vnd.tmobile-livetv": { - "source": "iana", - "extensions": ["tmo"] - }, - "application/vnd.tri.onesource": { - "source": "iana" - }, - "application/vnd.trid.tpt": { - "source": "iana", - "extensions": ["tpt"] - }, - "application/vnd.triscape.mxs": { - "source": "iana", - "extensions": ["mxs"] - }, - "application/vnd.trueapp": { - "source": "iana", - "extensions": ["tra"] - }, - "application/vnd.truedoc": { - "source": "iana" - }, - "application/vnd.ubisoft.webplayer": { - "source": "iana" - }, - "application/vnd.ufdl": { - "source": "iana", - "extensions": ["ufd","ufdl"] - }, - "application/vnd.uiq.theme": { - "source": "iana", - "extensions": ["utz"] - }, - "application/vnd.umajin": { - "source": "iana", - "extensions": ["umj"] - }, - "application/vnd.unity": { - "source": "iana", - "extensions": ["unityweb"] - }, - "application/vnd.uoml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["uoml"] - }, - "application/vnd.uplanet.alert": { - "source": "iana" - }, - "application/vnd.uplanet.alert-wbxml": { - "source": "iana" - }, - "application/vnd.uplanet.bearer-choice": { - "source": "iana" - }, - "application/vnd.uplanet.bearer-choice-wbxml": { - "source": "iana" - }, - "application/vnd.uplanet.cacheop": { - "source": "iana" - }, - "application/vnd.uplanet.cacheop-wbxml": { - "source": "iana" - }, - "application/vnd.uplanet.channel": { - "source": "iana" - }, - "application/vnd.uplanet.channel-wbxml": { - "source": "iana" - }, - "application/vnd.uplanet.list": { - "source": "iana" - }, - "application/vnd.uplanet.list-wbxml": { - "source": "iana" - }, - "application/vnd.uplanet.listcmd": { - "source": "iana" - }, - "application/vnd.uplanet.listcmd-wbxml": { - "source": "iana" - }, - "application/vnd.uplanet.signal": { - "source": "iana" - }, - "application/vnd.uri-map": { - "source": "iana" - }, - "application/vnd.valve.source.material": { - "source": "iana" - }, - "application/vnd.vcx": { - "source": "iana", - "extensions": ["vcx"] - }, - "application/vnd.vd-study": { - "source": "iana" - }, - "application/vnd.vectorworks": { - "source": "iana" - }, - "application/vnd.vel+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.verimatrix.vcas": { - "source": "iana" - }, - "application/vnd.veryant.thin": { - "source": "iana" - }, - "application/vnd.ves.encrypted": { - 
"source": "iana" - }, - "application/vnd.vidsoft.vidconference": { - "source": "iana" - }, - "application/vnd.visio": { - "source": "iana", - "extensions": ["vsd","vst","vss","vsw"] - }, - "application/vnd.visionary": { - "source": "iana", - "extensions": ["vis"] - }, - "application/vnd.vividence.scriptfile": { - "source": "iana" - }, - "application/vnd.vsf": { - "source": "iana", - "extensions": ["vsf"] - }, - "application/vnd.wap.sic": { - "source": "iana" - }, - "application/vnd.wap.slc": { - "source": "iana" - }, - "application/vnd.wap.wbxml": { - "source": "iana", - "extensions": ["wbxml"] - }, - "application/vnd.wap.wmlc": { - "source": "iana", - "extensions": ["wmlc"] - }, - "application/vnd.wap.wmlscriptc": { - "source": "iana", - "extensions": ["wmlsc"] - }, - "application/vnd.webturbo": { - "source": "iana", - "extensions": ["wtb"] - }, - "application/vnd.wfa.p2p": { - "source": "iana" - }, - "application/vnd.wfa.wsc": { - "source": "iana" - }, - "application/vnd.windows.devicepairing": { - "source": "iana" - }, - "application/vnd.wmc": { - "source": "iana" - }, - "application/vnd.wmf.bootstrap": { - "source": "iana" - }, - "application/vnd.wolfram.mathematica": { - "source": "iana" - }, - "application/vnd.wolfram.mathematica.package": { - "source": "iana" - }, - "application/vnd.wolfram.player": { - "source": "iana", - "extensions": ["nbp"] - }, - "application/vnd.wordperfect": { - "source": "iana", - "extensions": ["wpd"] - }, - "application/vnd.wqd": { - "source": "iana", - "extensions": ["wqd"] - }, - "application/vnd.wrq-hp3000-labelled": { - "source": "iana" - }, - "application/vnd.wt.stf": { - "source": "iana", - "extensions": ["stf"] - }, - "application/vnd.wv.csp+wbxml": { - "source": "iana" - }, - "application/vnd.wv.csp+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.wv.ssp+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.xacml+json": { - "source": "iana", - "compressible": true - }, - "application/vnd.xara": { - "source": "iana", - "extensions": ["xar"] - }, - "application/vnd.xfdl": { - "source": "iana", - "extensions": ["xfdl"] - }, - "application/vnd.xfdl.webform": { - "source": "iana" - }, - "application/vnd.xmi+xml": { - "source": "iana", - "compressible": true - }, - "application/vnd.xmpie.cpkg": { - "source": "iana" - }, - "application/vnd.xmpie.dpkg": { - "source": "iana" - }, - "application/vnd.xmpie.plan": { - "source": "iana" - }, - "application/vnd.xmpie.ppkg": { - "source": "iana" - }, - "application/vnd.xmpie.xlim": { - "source": "iana" - }, - "application/vnd.yamaha.hv-dic": { - "source": "iana", - "extensions": ["hvd"] - }, - "application/vnd.yamaha.hv-script": { - "source": "iana", - "extensions": ["hvs"] - }, - "application/vnd.yamaha.hv-voice": { - "source": "iana", - "extensions": ["hvp"] - }, - "application/vnd.yamaha.openscoreformat": { - "source": "iana", - "extensions": ["osf"] - }, - "application/vnd.yamaha.openscoreformat.osfpvg+xml": { - "source": "iana", - "compressible": true, - "extensions": ["osfpvg"] - }, - "application/vnd.yamaha.remote-setup": { - "source": "iana" - }, - "application/vnd.yamaha.smaf-audio": { - "source": "iana", - "extensions": ["saf"] - }, - "application/vnd.yamaha.smaf-phrase": { - "source": "iana", - "extensions": ["spf"] - }, - "application/vnd.yamaha.through-ngn": { - "source": "iana" - }, - "application/vnd.yamaha.tunnel-udpencap": { - "source": "iana" - }, - "application/vnd.yaoweme": { - "source": "iana" - }, - "application/vnd.yellowriver-custom-menu": { - 
"source": "iana", - "extensions": ["cmp"] - }, - "application/vnd.youtube.yt": { - "source": "iana" - }, - "application/vnd.zul": { - "source": "iana", - "extensions": ["zir","zirz"] - }, - "application/vnd.zzazz.deck+xml": { - "source": "iana", - "compressible": true, - "extensions": ["zaz"] - }, - "application/voicexml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["vxml"] - }, - "application/voucher-cms+json": { - "source": "iana", - "compressible": true - }, - "application/vq-rtcpxr": { - "source": "iana" - }, - "application/wasm": { - "compressible": true, - "extensions": ["wasm"] - }, - "application/watcherinfo+xml": { - "source": "iana", - "compressible": true - }, - "application/webpush-options+json": { - "source": "iana", - "compressible": true - }, - "application/whoispp-query": { - "source": "iana" - }, - "application/whoispp-response": { - "source": "iana" - }, - "application/widget": { - "source": "iana", - "extensions": ["wgt"] - }, - "application/winhlp": { - "source": "apache", - "extensions": ["hlp"] - }, - "application/wita": { - "source": "iana" - }, - "application/wordperfect5.1": { - "source": "iana" - }, - "application/wsdl+xml": { - "source": "iana", - "compressible": true, - "extensions": ["wsdl"] - }, - "application/wspolicy+xml": { - "source": "iana", - "compressible": true, - "extensions": ["wspolicy"] - }, - "application/x-7z-compressed": { - "source": "apache", - "compressible": false, - "extensions": ["7z"] - }, - "application/x-abiword": { - "source": "apache", - "extensions": ["abw"] - }, - "application/x-ace-compressed": { - "source": "apache", - "extensions": ["ace"] - }, - "application/x-amf": { - "source": "apache" - }, - "application/x-apple-diskimage": { - "source": "apache", - "extensions": ["dmg"] - }, - "application/x-arj": { - "compressible": false, - "extensions": ["arj"] - }, - "application/x-authorware-bin": { - "source": "apache", - "extensions": ["aab","x32","u32","vox"] - }, - "application/x-authorware-map": { - "source": "apache", - "extensions": ["aam"] - }, - "application/x-authorware-seg": { - "source": "apache", - "extensions": ["aas"] - }, - "application/x-bcpio": { - "source": "apache", - "extensions": ["bcpio"] - }, - "application/x-bdoc": { - "compressible": false, - "extensions": ["bdoc"] - }, - "application/x-bittorrent": { - "source": "apache", - "extensions": ["torrent"] - }, - "application/x-blorb": { - "source": "apache", - "extensions": ["blb","blorb"] - }, - "application/x-bzip": { - "source": "apache", - "compressible": false, - "extensions": ["bz"] - }, - "application/x-bzip2": { - "source": "apache", - "compressible": false, - "extensions": ["bz2","boz"] - }, - "application/x-cbr": { - "source": "apache", - "extensions": ["cbr","cba","cbt","cbz","cb7"] - }, - "application/x-cdlink": { - "source": "apache", - "extensions": ["vcd"] - }, - "application/x-cfs-compressed": { - "source": "apache", - "extensions": ["cfs"] - }, - "application/x-chat": { - "source": "apache", - "extensions": ["chat"] - }, - "application/x-chess-pgn": { - "source": "apache", - "extensions": ["pgn"] - }, - "application/x-chrome-extension": { - "extensions": ["crx"] - }, - "application/x-cocoa": { - "source": "nginx", - "extensions": ["cco"] - }, - "application/x-compress": { - "source": "apache" - }, - "application/x-conference": { - "source": "apache", - "extensions": ["nsc"] - }, - "application/x-cpio": { - "source": "apache", - "extensions": ["cpio"] - }, - "application/x-csh": { - "source": "apache", - "extensions": ["csh"] 
- }, - "application/x-deb": { - "compressible": false - }, - "application/x-debian-package": { - "source": "apache", - "extensions": ["deb","udeb"] - }, - "application/x-dgc-compressed": { - "source": "apache", - "extensions": ["dgc"] - }, - "application/x-director": { - "source": "apache", - "extensions": ["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"] - }, - "application/x-doom": { - "source": "apache", - "extensions": ["wad"] - }, - "application/x-dtbncx+xml": { - "source": "apache", - "compressible": true, - "extensions": ["ncx"] - }, - "application/x-dtbook+xml": { - "source": "apache", - "compressible": true, - "extensions": ["dtb"] - }, - "application/x-dtbresource+xml": { - "source": "apache", - "compressible": true, - "extensions": ["res"] - }, - "application/x-dvi": { - "source": "apache", - "compressible": false, - "extensions": ["dvi"] - }, - "application/x-envoy": { - "source": "apache", - "extensions": ["evy"] - }, - "application/x-eva": { - "source": "apache", - "extensions": ["eva"] - }, - "application/x-font-bdf": { - "source": "apache", - "extensions": ["bdf"] - }, - "application/x-font-dos": { - "source": "apache" - }, - "application/x-font-framemaker": { - "source": "apache" - }, - "application/x-font-ghostscript": { - "source": "apache", - "extensions": ["gsf"] - }, - "application/x-font-libgrx": { - "source": "apache" - }, - "application/x-font-linux-psf": { - "source": "apache", - "extensions": ["psf"] - }, - "application/x-font-pcf": { - "source": "apache", - "extensions": ["pcf"] - }, - "application/x-font-snf": { - "source": "apache", - "extensions": ["snf"] - }, - "application/x-font-speedo": { - "source": "apache" - }, - "application/x-font-sunos-news": { - "source": "apache" - }, - "application/x-font-type1": { - "source": "apache", - "extensions": ["pfa","pfb","pfm","afm"] - }, - "application/x-font-vfont": { - "source": "apache" - }, - "application/x-freearc": { - "source": "apache", - "extensions": ["arc"] - }, - "application/x-futuresplash": { - "source": "apache", - "extensions": ["spl"] - }, - "application/x-gca-compressed": { - "source": "apache", - "extensions": ["gca"] - }, - "application/x-glulx": { - "source": "apache", - "extensions": ["ulx"] - }, - "application/x-gnumeric": { - "source": "apache", - "extensions": ["gnumeric"] - }, - "application/x-gramps-xml": { - "source": "apache", - "extensions": ["gramps"] - }, - "application/x-gtar": { - "source": "apache", - "extensions": ["gtar"] - }, - "application/x-gzip": { - "source": "apache" - }, - "application/x-hdf": { - "source": "apache", - "extensions": ["hdf"] - }, - "application/x-httpd-php": { - "compressible": true, - "extensions": ["php"] - }, - "application/x-install-instructions": { - "source": "apache", - "extensions": ["install"] - }, - "application/x-iso9660-image": { - "source": "apache", - "extensions": ["iso"] - }, - "application/x-java-archive-diff": { - "source": "nginx", - "extensions": ["jardiff"] - }, - "application/x-java-jnlp-file": { - "source": "apache", - "compressible": false, - "extensions": ["jnlp"] - }, - "application/x-javascript": { - "compressible": true - }, - "application/x-keepass2": { - "extensions": ["kdbx"] - }, - "application/x-latex": { - "source": "apache", - "compressible": false, - "extensions": ["latex"] - }, - "application/x-lua-bytecode": { - "extensions": ["luac"] - }, - "application/x-lzh-compressed": { - "source": "apache", - "extensions": ["lzh","lha"] - }, - "application/x-makeself": { - "source": "nginx", - "extensions": ["run"] - }, - 
"application/x-mie": { - "source": "apache", - "extensions": ["mie"] - }, - "application/x-mobipocket-ebook": { - "source": "apache", - "extensions": ["prc","mobi"] - }, - "application/x-mpegurl": { - "compressible": false - }, - "application/x-ms-application": { - "source": "apache", - "extensions": ["application"] - }, - "application/x-ms-shortcut": { - "source": "apache", - "extensions": ["lnk"] - }, - "application/x-ms-wmd": { - "source": "apache", - "extensions": ["wmd"] - }, - "application/x-ms-wmz": { - "source": "apache", - "extensions": ["wmz"] - }, - "application/x-ms-xbap": { - "source": "apache", - "extensions": ["xbap"] - }, - "application/x-msaccess": { - "source": "apache", - "extensions": ["mdb"] - }, - "application/x-msbinder": { - "source": "apache", - "extensions": ["obd"] - }, - "application/x-mscardfile": { - "source": "apache", - "extensions": ["crd"] - }, - "application/x-msclip": { - "source": "apache", - "extensions": ["clp"] - }, - "application/x-msdos-program": { - "extensions": ["exe"] - }, - "application/x-msdownload": { - "source": "apache", - "extensions": ["exe","dll","com","bat","msi"] - }, - "application/x-msmediaview": { - "source": "apache", - "extensions": ["mvb","m13","m14"] - }, - "application/x-msmetafile": { - "source": "apache", - "extensions": ["wmf","wmz","emf","emz"] - }, - "application/x-msmoney": { - "source": "apache", - "extensions": ["mny"] - }, - "application/x-mspublisher": { - "source": "apache", - "extensions": ["pub"] - }, - "application/x-msschedule": { - "source": "apache", - "extensions": ["scd"] - }, - "application/x-msterminal": { - "source": "apache", - "extensions": ["trm"] - }, - "application/x-mswrite": { - "source": "apache", - "extensions": ["wri"] - }, - "application/x-netcdf": { - "source": "apache", - "extensions": ["nc","cdf"] - }, - "application/x-ns-proxy-autoconfig": { - "compressible": true, - "extensions": ["pac"] - }, - "application/x-nzb": { - "source": "apache", - "extensions": ["nzb"] - }, - "application/x-perl": { - "source": "nginx", - "extensions": ["pl","pm"] - }, - "application/x-pilot": { - "source": "nginx", - "extensions": ["prc","pdb"] - }, - "application/x-pkcs12": { - "source": "apache", - "compressible": false, - "extensions": ["p12","pfx"] - }, - "application/x-pkcs7-certificates": { - "source": "apache", - "extensions": ["p7b","spc"] - }, - "application/x-pkcs7-certreqresp": { - "source": "apache", - "extensions": ["p7r"] - }, - "application/x-rar-compressed": { - "source": "apache", - "compressible": false, - "extensions": ["rar"] - }, - "application/x-redhat-package-manager": { - "source": "nginx", - "extensions": ["rpm"] - }, - "application/x-research-info-systems": { - "source": "apache", - "extensions": ["ris"] - }, - "application/x-sea": { - "source": "nginx", - "extensions": ["sea"] - }, - "application/x-sh": { - "source": "apache", - "compressible": true, - "extensions": ["sh"] - }, - "application/x-shar": { - "source": "apache", - "extensions": ["shar"] - }, - "application/x-shockwave-flash": { - "source": "apache", - "compressible": false, - "extensions": ["swf"] - }, - "application/x-silverlight-app": { - "source": "apache", - "extensions": ["xap"] - }, - "application/x-sql": { - "source": "apache", - "extensions": ["sql"] - }, - "application/x-stuffit": { - "source": "apache", - "compressible": false, - "extensions": ["sit"] - }, - "application/x-stuffitx": { - "source": "apache", - "extensions": ["sitx"] - }, - "application/x-subrip": { - "source": "apache", - "extensions": ["srt"] - 
}, - "application/x-sv4cpio": { - "source": "apache", - "extensions": ["sv4cpio"] - }, - "application/x-sv4crc": { - "source": "apache", - "extensions": ["sv4crc"] - }, - "application/x-t3vm-image": { - "source": "apache", - "extensions": ["t3"] - }, - "application/x-tads": { - "source": "apache", - "extensions": ["gam"] - }, - "application/x-tar": { - "source": "apache", - "compressible": true, - "extensions": ["tar"] - }, - "application/x-tcl": { - "source": "apache", - "extensions": ["tcl","tk"] - }, - "application/x-tex": { - "source": "apache", - "extensions": ["tex"] - }, - "application/x-tex-tfm": { - "source": "apache", - "extensions": ["tfm"] - }, - "application/x-texinfo": { - "source": "apache", - "extensions": ["texinfo","texi"] - }, - "application/x-tgif": { - "source": "apache", - "extensions": ["obj"] - }, - "application/x-ustar": { - "source": "apache", - "extensions": ["ustar"] - }, - "application/x-virtualbox-hdd": { - "compressible": true, - "extensions": ["hdd"] - }, - "application/x-virtualbox-ova": { - "compressible": true, - "extensions": ["ova"] - }, - "application/x-virtualbox-ovf": { - "compressible": true, - "extensions": ["ovf"] - }, - "application/x-virtualbox-vbox": { - "compressible": true, - "extensions": ["vbox"] - }, - "application/x-virtualbox-vbox-extpack": { - "compressible": false, - "extensions": ["vbox-extpack"] - }, - "application/x-virtualbox-vdi": { - "compressible": true, - "extensions": ["vdi"] - }, - "application/x-virtualbox-vhd": { - "compressible": true, - "extensions": ["vhd"] - }, - "application/x-virtualbox-vmdk": { - "compressible": true, - "extensions": ["vmdk"] - }, - "application/x-wais-source": { - "source": "apache", - "extensions": ["src"] - }, - "application/x-web-app-manifest+json": { - "compressible": true, - "extensions": ["webapp"] - }, - "application/x-www-form-urlencoded": { - "source": "iana", - "compressible": true - }, - "application/x-x509-ca-cert": { - "source": "apache", - "extensions": ["der","crt","pem"] - }, - "application/x-xfig": { - "source": "apache", - "extensions": ["fig"] - }, - "application/x-xliff+xml": { - "source": "apache", - "compressible": true, - "extensions": ["xlf"] - }, - "application/x-xpinstall": { - "source": "apache", - "compressible": false, - "extensions": ["xpi"] - }, - "application/x-xz": { - "source": "apache", - "extensions": ["xz"] - }, - "application/x-zmachine": { - "source": "apache", - "extensions": ["z1","z2","z3","z4","z5","z6","z7","z8"] - }, - "application/x400-bp": { - "source": "iana" - }, - "application/xacml+xml": { - "source": "iana", - "compressible": true - }, - "application/xaml+xml": { - "source": "apache", - "compressible": true, - "extensions": ["xaml"] - }, - "application/xcap-att+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xav"] - }, - "application/xcap-caps+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xca"] - }, - "application/xcap-diff+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xdf"] - }, - "application/xcap-el+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xel"] - }, - "application/xcap-error+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xer"] - }, - "application/xcap-ns+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xns"] - }, - "application/xcon-conference-info+xml": { - "source": "iana", - "compressible": true - }, - "application/xcon-conference-info-diff+xml": { - "source": "iana", - "compressible": true - }, - 
"application/xenc+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xenc"] - }, - "application/xhtml+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xhtml","xht"] - }, - "application/xhtml-voice+xml": { - "source": "apache", - "compressible": true - }, - "application/xliff+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xlf"] - }, - "application/xml": { - "source": "iana", - "compressible": true, - "extensions": ["xml","xsl","xsd","rng"] - }, - "application/xml-dtd": { - "source": "iana", - "compressible": true, - "extensions": ["dtd"] - }, - "application/xml-external-parsed-entity": { - "source": "iana" - }, - "application/xml-patch+xml": { - "source": "iana", - "compressible": true - }, - "application/xmpp+xml": { - "source": "iana", - "compressible": true - }, - "application/xop+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xop"] - }, - "application/xproc+xml": { - "source": "apache", - "compressible": true, - "extensions": ["xpl"] - }, - "application/xslt+xml": { - "source": "iana", - "compressible": true, - "extensions": ["xslt"] - }, - "application/xspf+xml": { - "source": "apache", - "compressible": true, - "extensions": ["xspf"] - }, - "application/xv+xml": { - "source": "iana", - "compressible": true, - "extensions": ["mxml","xhvml","xvml","xvm"] - }, - "application/yang": { - "source": "iana", - "extensions": ["yang"] - }, - "application/yang-data+json": { - "source": "iana", - "compressible": true - }, - "application/yang-data+xml": { - "source": "iana", - "compressible": true - }, - "application/yang-patch+json": { - "source": "iana", - "compressible": true - }, - "application/yang-patch+xml": { - "source": "iana", - "compressible": true - }, - "application/yin+xml": { - "source": "iana", - "compressible": true, - "extensions": ["yin"] - }, - "application/zip": { - "source": "iana", - "compressible": false, - "extensions": ["zip"] - }, - "application/zlib": { - "source": "iana" - }, - "application/zstd": { - "source": "iana" - }, - "audio/1d-interleaved-parityfec": { - "source": "iana" - }, - "audio/32kadpcm": { - "source": "iana" - }, - "audio/3gpp": { - "source": "iana", - "compressible": false, - "extensions": ["3gpp"] - }, - "audio/3gpp2": { - "source": "iana" - }, - "audio/aac": { - "source": "iana" - }, - "audio/ac3": { - "source": "iana" - }, - "audio/adpcm": { - "source": "apache", - "extensions": ["adp"] - }, - "audio/amr": { - "source": "iana" - }, - "audio/amr-wb": { - "source": "iana" - }, - "audio/amr-wb+": { - "source": "iana" - }, - "audio/aptx": { - "source": "iana" - }, - "audio/asc": { - "source": "iana" - }, - "audio/atrac-advanced-lossless": { - "source": "iana" - }, - "audio/atrac-x": { - "source": "iana" - }, - "audio/atrac3": { - "source": "iana" - }, - "audio/basic": { - "source": "iana", - "compressible": false, - "extensions": ["au","snd"] - }, - "audio/bv16": { - "source": "iana" - }, - "audio/bv32": { - "source": "iana" - }, - "audio/clearmode": { - "source": "iana" - }, - "audio/cn": { - "source": "iana" - }, - "audio/dat12": { - "source": "iana" - }, - "audio/dls": { - "source": "iana" - }, - "audio/dsr-es201108": { - "source": "iana" - }, - "audio/dsr-es202050": { - "source": "iana" - }, - "audio/dsr-es202211": { - "source": "iana" - }, - "audio/dsr-es202212": { - "source": "iana" - }, - "audio/dv": { - "source": "iana" - }, - "audio/dvi4": { - "source": "iana" - }, - "audio/eac3": { - "source": "iana" - }, - "audio/encaprtp": { - "source": "iana" - }, - 
"audio/evrc": { - "source": "iana" - }, - "audio/evrc-qcp": { - "source": "iana" - }, - "audio/evrc0": { - "source": "iana" - }, - "audio/evrc1": { - "source": "iana" - }, - "audio/evrcb": { - "source": "iana" - }, - "audio/evrcb0": { - "source": "iana" - }, - "audio/evrcb1": { - "source": "iana" - }, - "audio/evrcnw": { - "source": "iana" - }, - "audio/evrcnw0": { - "source": "iana" - }, - "audio/evrcnw1": { - "source": "iana" - }, - "audio/evrcwb": { - "source": "iana" - }, - "audio/evrcwb0": { - "source": "iana" - }, - "audio/evrcwb1": { - "source": "iana" - }, - "audio/evs": { - "source": "iana" - }, - "audio/flexfec": { - "source": "iana" - }, - "audio/fwdred": { - "source": "iana" - }, - "audio/g711-0": { - "source": "iana" - }, - "audio/g719": { - "source": "iana" - }, - "audio/g722": { - "source": "iana" - }, - "audio/g7221": { - "source": "iana" - }, - "audio/g723": { - "source": "iana" - }, - "audio/g726-16": { - "source": "iana" - }, - "audio/g726-24": { - "source": "iana" - }, - "audio/g726-32": { - "source": "iana" - }, - "audio/g726-40": { - "source": "iana" - }, - "audio/g728": { - "source": "iana" - }, - "audio/g729": { - "source": "iana" - }, - "audio/g7291": { - "source": "iana" - }, - "audio/g729d": { - "source": "iana" - }, - "audio/g729e": { - "source": "iana" - }, - "audio/gsm": { - "source": "iana" - }, - "audio/gsm-efr": { - "source": "iana" - }, - "audio/gsm-hr-08": { - "source": "iana" - }, - "audio/ilbc": { - "source": "iana" - }, - "audio/ip-mr_v2.5": { - "source": "iana" - }, - "audio/isac": { - "source": "apache" - }, - "audio/l16": { - "source": "iana" - }, - "audio/l20": { - "source": "iana" - }, - "audio/l24": { - "source": "iana", - "compressible": false - }, - "audio/l8": { - "source": "iana" - }, - "audio/lpc": { - "source": "iana" - }, - "audio/melp": { - "source": "iana" - }, - "audio/melp1200": { - "source": "iana" - }, - "audio/melp2400": { - "source": "iana" - }, - "audio/melp600": { - "source": "iana" - }, - "audio/midi": { - "source": "apache", - "extensions": ["mid","midi","kar","rmi"] - }, - "audio/mobile-xmf": { - "source": "iana", - "extensions": ["mxmf"] - }, - "audio/mp3": { - "compressible": false, - "extensions": ["mp3"] - }, - "audio/mp4": { - "source": "iana", - "compressible": false, - "extensions": ["m4a","mp4a"] - }, - "audio/mp4a-latm": { - "source": "iana" - }, - "audio/mpa": { - "source": "iana" - }, - "audio/mpa-robust": { - "source": "iana" - }, - "audio/mpeg": { - "source": "iana", - "compressible": false, - "extensions": ["mpga","mp2","mp2a","mp3","m2a","m3a"] - }, - "audio/mpeg4-generic": { - "source": "iana" - }, - "audio/musepack": { - "source": "apache" - }, - "audio/ogg": { - "source": "iana", - "compressible": false, - "extensions": ["oga","ogg","spx"] - }, - "audio/opus": { - "source": "iana" - }, - "audio/parityfec": { - "source": "iana" - }, - "audio/pcma": { - "source": "iana" - }, - "audio/pcma-wb": { - "source": "iana" - }, - "audio/pcmu": { - "source": "iana" - }, - "audio/pcmu-wb": { - "source": "iana" - }, - "audio/prs.sid": { - "source": "iana" - }, - "audio/qcelp": { - "source": "iana" - }, - "audio/raptorfec": { - "source": "iana" - }, - "audio/red": { - "source": "iana" - }, - "audio/rtp-enc-aescm128": { - "source": "iana" - }, - "audio/rtp-midi": { - "source": "iana" - }, - "audio/rtploopback": { - "source": "iana" - }, - "audio/rtx": { - "source": "iana" - }, - "audio/s3m": { - "source": "apache", - "extensions": ["s3m"] - }, - "audio/silk": { - "source": "apache", - "extensions": ["sil"] - }, - 
"audio/smv": { - "source": "iana" - }, - "audio/smv-qcp": { - "source": "iana" - }, - "audio/smv0": { - "source": "iana" - }, - "audio/sp-midi": { - "source": "iana" - }, - "audio/speex": { - "source": "iana" - }, - "audio/t140c": { - "source": "iana" - }, - "audio/t38": { - "source": "iana" - }, - "audio/telephone-event": { - "source": "iana" - }, - "audio/tetra_acelp": { - "source": "iana" - }, - "audio/tone": { - "source": "iana" - }, - "audio/uemclip": { - "source": "iana" - }, - "audio/ulpfec": { - "source": "iana" - }, - "audio/usac": { - "source": "iana" - }, - "audio/vdvi": { - "source": "iana" - }, - "audio/vmr-wb": { - "source": "iana" - }, - "audio/vnd.3gpp.iufp": { - "source": "iana" - }, - "audio/vnd.4sb": { - "source": "iana" - }, - "audio/vnd.audiokoz": { - "source": "iana" - }, - "audio/vnd.celp": { - "source": "iana" - }, - "audio/vnd.cisco.nse": { - "source": "iana" - }, - "audio/vnd.cmles.radio-events": { - "source": "iana" - }, - "audio/vnd.cns.anp1": { - "source": "iana" - }, - "audio/vnd.cns.inf1": { - "source": "iana" - }, - "audio/vnd.dece.audio": { - "source": "iana", - "extensions": ["uva","uvva"] - }, - "audio/vnd.digital-winds": { - "source": "iana", - "extensions": ["eol"] - }, - "audio/vnd.dlna.adts": { - "source": "iana" - }, - "audio/vnd.dolby.heaac.1": { - "source": "iana" - }, - "audio/vnd.dolby.heaac.2": { - "source": "iana" - }, - "audio/vnd.dolby.mlp": { - "source": "iana" - }, - "audio/vnd.dolby.mps": { - "source": "iana" - }, - "audio/vnd.dolby.pl2": { - "source": "iana" - }, - "audio/vnd.dolby.pl2x": { - "source": "iana" - }, - "audio/vnd.dolby.pl2z": { - "source": "iana" - }, - "audio/vnd.dolby.pulse.1": { - "source": "iana" - }, - "audio/vnd.dra": { - "source": "iana", - "extensions": ["dra"] - }, - "audio/vnd.dts": { - "source": "iana", - "extensions": ["dts"] - }, - "audio/vnd.dts.hd": { - "source": "iana", - "extensions": ["dtshd"] - }, - "audio/vnd.dts.uhd": { - "source": "iana" - }, - "audio/vnd.dvb.file": { - "source": "iana" - }, - "audio/vnd.everad.plj": { - "source": "iana" - }, - "audio/vnd.hns.audio": { - "source": "iana" - }, - "audio/vnd.lucent.voice": { - "source": "iana", - "extensions": ["lvp"] - }, - "audio/vnd.ms-playready.media.pya": { - "source": "iana", - "extensions": ["pya"] - }, - "audio/vnd.nokia.mobile-xmf": { - "source": "iana" - }, - "audio/vnd.nortel.vbk": { - "source": "iana" - }, - "audio/vnd.nuera.ecelp4800": { - "source": "iana", - "extensions": ["ecelp4800"] - }, - "audio/vnd.nuera.ecelp7470": { - "source": "iana", - "extensions": ["ecelp7470"] - }, - "audio/vnd.nuera.ecelp9600": { - "source": "iana", - "extensions": ["ecelp9600"] - }, - "audio/vnd.octel.sbc": { - "source": "iana" - }, - "audio/vnd.presonus.multitrack": { - "source": "iana" - }, - "audio/vnd.qcelp": { - "source": "iana" - }, - "audio/vnd.rhetorex.32kadpcm": { - "source": "iana" - }, - "audio/vnd.rip": { - "source": "iana", - "extensions": ["rip"] - }, - "audio/vnd.rn-realaudio": { - "compressible": false - }, - "audio/vnd.sealedmedia.softseal.mpeg": { - "source": "iana" - }, - "audio/vnd.vmx.cvsd": { - "source": "iana" - }, - "audio/vnd.wave": { - "compressible": false - }, - "audio/vorbis": { - "source": "iana", - "compressible": false - }, - "audio/vorbis-config": { - "source": "iana" - }, - "audio/wav": { - "compressible": false, - "extensions": ["wav"] - }, - "audio/wave": { - "compressible": false, - "extensions": ["wav"] - }, - "audio/webm": { - "source": "apache", - "compressible": false, - "extensions": ["weba"] - }, - "audio/x-aac": { - 
"source": "apache", - "compressible": false, - "extensions": ["aac"] - }, - "audio/x-aiff": { - "source": "apache", - "extensions": ["aif","aiff","aifc"] - }, - "audio/x-caf": { - "source": "apache", - "compressible": false, - "extensions": ["caf"] - }, - "audio/x-flac": { - "source": "apache", - "extensions": ["flac"] - }, - "audio/x-m4a": { - "source": "nginx", - "extensions": ["m4a"] - }, - "audio/x-matroska": { - "source": "apache", - "extensions": ["mka"] - }, - "audio/x-mpegurl": { - "source": "apache", - "extensions": ["m3u"] - }, - "audio/x-ms-wax": { - "source": "apache", - "extensions": ["wax"] - }, - "audio/x-ms-wma": { - "source": "apache", - "extensions": ["wma"] - }, - "audio/x-pn-realaudio": { - "source": "apache", - "extensions": ["ram","ra"] - }, - "audio/x-pn-realaudio-plugin": { - "source": "apache", - "extensions": ["rmp"] - }, - "audio/x-realaudio": { - "source": "nginx", - "extensions": ["ra"] - }, - "audio/x-tta": { - "source": "apache" - }, - "audio/x-wav": { - "source": "apache", - "extensions": ["wav"] - }, - "audio/xm": { - "source": "apache", - "extensions": ["xm"] - }, - "chemical/x-cdx": { - "source": "apache", - "extensions": ["cdx"] - }, - "chemical/x-cif": { - "source": "apache", - "extensions": ["cif"] - }, - "chemical/x-cmdf": { - "source": "apache", - "extensions": ["cmdf"] - }, - "chemical/x-cml": { - "source": "apache", - "extensions": ["cml"] - }, - "chemical/x-csml": { - "source": "apache", - "extensions": ["csml"] - }, - "chemical/x-pdb": { - "source": "apache" - }, - "chemical/x-xyz": { - "source": "apache", - "extensions": ["xyz"] - }, - "font/collection": { - "source": "iana", - "extensions": ["ttc"] - }, - "font/otf": { - "source": "iana", - "compressible": true, - "extensions": ["otf"] - }, - "font/sfnt": { - "source": "iana" - }, - "font/ttf": { - "source": "iana", - "compressible": true, - "extensions": ["ttf"] - }, - "font/woff": { - "source": "iana", - "extensions": ["woff"] - }, - "font/woff2": { - "source": "iana", - "extensions": ["woff2"] - }, - "image/aces": { - "source": "iana", - "extensions": ["exr"] - }, - "image/apng": { - "compressible": false, - "extensions": ["apng"] - }, - "image/avci": { - "source": "iana" - }, - "image/avcs": { - "source": "iana" - }, - "image/bmp": { - "source": "iana", - "compressible": true, - "extensions": ["bmp"] - }, - "image/cgm": { - "source": "iana", - "extensions": ["cgm"] - }, - "image/dicom-rle": { - "source": "iana", - "extensions": ["drle"] - }, - "image/emf": { - "source": "iana", - "extensions": ["emf"] - }, - "image/fits": { - "source": "iana", - "extensions": ["fits"] - }, - "image/g3fax": { - "source": "iana", - "extensions": ["g3"] - }, - "image/gif": { - "source": "iana", - "compressible": false, - "extensions": ["gif"] - }, - "image/heic": { - "source": "iana", - "extensions": ["heic"] - }, - "image/heic-sequence": { - "source": "iana", - "extensions": ["heics"] - }, - "image/heif": { - "source": "iana", - "extensions": ["heif"] - }, - "image/heif-sequence": { - "source": "iana", - "extensions": ["heifs"] - }, - "image/hej2k": { - "source": "iana", - "extensions": ["hej2"] - }, - "image/hsj2": { - "source": "iana", - "extensions": ["hsj2"] - }, - "image/ief": { - "source": "iana", - "extensions": ["ief"] - }, - "image/jls": { - "source": "iana", - "extensions": ["jls"] - }, - "image/jp2": { - "source": "iana", - "compressible": false, - "extensions": ["jp2","jpg2"] - }, - "image/jpeg": { - "source": "iana", - "compressible": false, - "extensions": ["jpeg","jpg","jpe"] - }, - 
"image/jph": { - "source": "iana", - "extensions": ["jph"] - }, - "image/jphc": { - "source": "iana", - "extensions": ["jhc"] - }, - "image/jpm": { - "source": "iana", - "compressible": false, - "extensions": ["jpm"] - }, - "image/jpx": { - "source": "iana", - "compressible": false, - "extensions": ["jpx","jpf"] - }, - "image/jxr": { - "source": "iana", - "extensions": ["jxr"] - }, - "image/jxra": { - "source": "iana", - "extensions": ["jxra"] - }, - "image/jxrs": { - "source": "iana", - "extensions": ["jxrs"] - }, - "image/jxs": { - "source": "iana", - "extensions": ["jxs"] - }, - "image/jxsc": { - "source": "iana", - "extensions": ["jxsc"] - }, - "image/jxsi": { - "source": "iana", - "extensions": ["jxsi"] - }, - "image/jxss": { - "source": "iana", - "extensions": ["jxss"] - }, - "image/ktx": { - "source": "iana", - "extensions": ["ktx"] - }, - "image/naplps": { - "source": "iana" - }, - "image/pjpeg": { - "compressible": false - }, - "image/png": { - "source": "iana", - "compressible": false, - "extensions": ["png"] - }, - "image/prs.btif": { - "source": "iana", - "extensions": ["btif"] - }, - "image/prs.pti": { - "source": "iana", - "extensions": ["pti"] - }, - "image/pwg-raster": { - "source": "iana" - }, - "image/sgi": { - "source": "apache", - "extensions": ["sgi"] - }, - "image/svg+xml": { - "source": "iana", - "compressible": true, - "extensions": ["svg","svgz"] - }, - "image/t38": { - "source": "iana", - "extensions": ["t38"] - }, - "image/tiff": { - "source": "iana", - "compressible": false, - "extensions": ["tif","tiff"] - }, - "image/tiff-fx": { - "source": "iana", - "extensions": ["tfx"] - }, - "image/vnd.adobe.photoshop": { - "source": "iana", - "compressible": true, - "extensions": ["psd"] - }, - "image/vnd.airzip.accelerator.azv": { - "source": "iana", - "extensions": ["azv"] - }, - "image/vnd.cns.inf2": { - "source": "iana" - }, - "image/vnd.dece.graphic": { - "source": "iana", - "extensions": ["uvi","uvvi","uvg","uvvg"] - }, - "image/vnd.djvu": { - "source": "iana", - "extensions": ["djvu","djv"] - }, - "image/vnd.dvb.subtitle": { - "source": "iana", - "extensions": ["sub"] - }, - "image/vnd.dwg": { - "source": "iana", - "extensions": ["dwg"] - }, - "image/vnd.dxf": { - "source": "iana", - "extensions": ["dxf"] - }, - "image/vnd.fastbidsheet": { - "source": "iana", - "extensions": ["fbs"] - }, - "image/vnd.fpx": { - "source": "iana", - "extensions": ["fpx"] - }, - "image/vnd.fst": { - "source": "iana", - "extensions": ["fst"] - }, - "image/vnd.fujixerox.edmics-mmr": { - "source": "iana", - "extensions": ["mmr"] - }, - "image/vnd.fujixerox.edmics-rlc": { - "source": "iana", - "extensions": ["rlc"] - }, - "image/vnd.globalgraphics.pgb": { - "source": "iana" - }, - "image/vnd.microsoft.icon": { - "source": "iana", - "extensions": ["ico"] - }, - "image/vnd.mix": { - "source": "iana" - }, - "image/vnd.mozilla.apng": { - "source": "iana" - }, - "image/vnd.ms-dds": { - "extensions": ["dds"] - }, - "image/vnd.ms-modi": { - "source": "iana", - "extensions": ["mdi"] - }, - "image/vnd.ms-photo": { - "source": "apache", - "extensions": ["wdp"] - }, - "image/vnd.net-fpx": { - "source": "iana", - "extensions": ["npx"] - }, - "image/vnd.radiance": { - "source": "iana" - }, - "image/vnd.sealed.png": { - "source": "iana" - }, - "image/vnd.sealedmedia.softseal.gif": { - "source": "iana" - }, - "image/vnd.sealedmedia.softseal.jpg": { - "source": "iana" - }, - "image/vnd.svf": { - "source": "iana" - }, - "image/vnd.tencent.tap": { - "source": "iana", - "extensions": ["tap"] - }, - 
"image/vnd.valve.source.texture": { - "source": "iana", - "extensions": ["vtf"] - }, - "image/vnd.wap.wbmp": { - "source": "iana", - "extensions": ["wbmp"] - }, - "image/vnd.xiff": { - "source": "iana", - "extensions": ["xif"] - }, - "image/vnd.zbrush.pcx": { - "source": "iana", - "extensions": ["pcx"] - }, - "image/webp": { - "source": "apache", - "extensions": ["webp"] - }, - "image/wmf": { - "source": "iana", - "extensions": ["wmf"] - }, - "image/x-3ds": { - "source": "apache", - "extensions": ["3ds"] - }, - "image/x-cmu-raster": { - "source": "apache", - "extensions": ["ras"] - }, - "image/x-cmx": { - "source": "apache", - "extensions": ["cmx"] - }, - "image/x-freehand": { - "source": "apache", - "extensions": ["fh","fhc","fh4","fh5","fh7"] - }, - "image/x-icon": { - "source": "apache", - "compressible": true, - "extensions": ["ico"] - }, - "image/x-jng": { - "source": "nginx", - "extensions": ["jng"] - }, - "image/x-mrsid-image": { - "source": "apache", - "extensions": ["sid"] - }, - "image/x-ms-bmp": { - "source": "nginx", - "compressible": true, - "extensions": ["bmp"] - }, - "image/x-pcx": { - "source": "apache", - "extensions": ["pcx"] - }, - "image/x-pict": { - "source": "apache", - "extensions": ["pic","pct"] - }, - "image/x-portable-anymap": { - "source": "apache", - "extensions": ["pnm"] - }, - "image/x-portable-bitmap": { - "source": "apache", - "extensions": ["pbm"] - }, - "image/x-portable-graymap": { - "source": "apache", - "extensions": ["pgm"] - }, - "image/x-portable-pixmap": { - "source": "apache", - "extensions": ["ppm"] - }, - "image/x-rgb": { - "source": "apache", - "extensions": ["rgb"] - }, - "image/x-tga": { - "source": "apache", - "extensions": ["tga"] - }, - "image/x-xbitmap": { - "source": "apache", - "extensions": ["xbm"] - }, - "image/x-xcf": { - "compressible": false - }, - "image/x-xpixmap": { - "source": "apache", - "extensions": ["xpm"] - }, - "image/x-xwindowdump": { - "source": "apache", - "extensions": ["xwd"] - }, - "message/cpim": { - "source": "iana" - }, - "message/delivery-status": { - "source": "iana" - }, - "message/disposition-notification": { - "source": "iana", - "extensions": [ - "disposition-notification" - ] - }, - "message/external-body": { - "source": "iana" - }, - "message/feedback-report": { - "source": "iana" - }, - "message/global": { - "source": "iana", - "extensions": ["u8msg"] - }, - "message/global-delivery-status": { - "source": "iana", - "extensions": ["u8dsn"] - }, - "message/global-disposition-notification": { - "source": "iana", - "extensions": ["u8mdn"] - }, - "message/global-headers": { - "source": "iana", - "extensions": ["u8hdr"] - }, - "message/http": { - "source": "iana", - "compressible": false - }, - "message/imdn+xml": { - "source": "iana", - "compressible": true - }, - "message/news": { - "source": "iana" - }, - "message/partial": { - "source": "iana", - "compressible": false - }, - "message/rfc822": { - "source": "iana", - "compressible": true, - "extensions": ["eml","mime"] - }, - "message/s-http": { - "source": "iana" - }, - "message/sip": { - "source": "iana" - }, - "message/sipfrag": { - "source": "iana" - }, - "message/tracking-status": { - "source": "iana" - }, - "message/vnd.si.simp": { - "source": "iana" - }, - "message/vnd.wfa.wsc": { - "source": "iana", - "extensions": ["wsc"] - }, - "model/3mf": { - "source": "iana", - "extensions": ["3mf"] - }, - "model/gltf+json": { - "source": "iana", - "compressible": true, - "extensions": ["gltf"] - }, - "model/gltf-binary": { - "source": "iana", - "compressible": 
true, - "extensions": ["glb"] - }, - "model/iges": { - "source": "iana", - "compressible": false, - "extensions": ["igs","iges"] - }, - "model/mesh": { - "source": "iana", - "compressible": false, - "extensions": ["msh","mesh","silo"] - }, - "model/stl": { - "source": "iana", - "extensions": ["stl"] - }, - "model/vnd.collada+xml": { - "source": "iana", - "compressible": true, - "extensions": ["dae"] - }, - "model/vnd.dwf": { - "source": "iana", - "extensions": ["dwf"] - }, - "model/vnd.flatland.3dml": { - "source": "iana" - }, - "model/vnd.gdl": { - "source": "iana", - "extensions": ["gdl"] - }, - "model/vnd.gs-gdl": { - "source": "apache" - }, - "model/vnd.gs.gdl": { - "source": "iana" - }, - "model/vnd.gtw": { - "source": "iana", - "extensions": ["gtw"] - }, - "model/vnd.moml+xml": { - "source": "iana", - "compressible": true - }, - "model/vnd.mts": { - "source": "iana", - "extensions": ["mts"] - }, - "model/vnd.opengex": { - "source": "iana", - "extensions": ["ogex"] - }, - "model/vnd.parasolid.transmit.binary": { - "source": "iana", - "extensions": ["x_b"] - }, - "model/vnd.parasolid.transmit.text": { - "source": "iana", - "extensions": ["x_t"] - }, - "model/vnd.rosette.annotated-data-model": { - "source": "iana" - }, - "model/vnd.usdz+zip": { - "source": "iana", - "compressible": false, - "extensions": ["usdz"] - }, - "model/vnd.valve.source.compiled-map": { - "source": "iana", - "extensions": ["bsp"] - }, - "model/vnd.vtu": { - "source": "iana", - "extensions": ["vtu"] - }, - "model/vrml": { - "source": "iana", - "compressible": false, - "extensions": ["wrl","vrml"] - }, - "model/x3d+binary": { - "source": "apache", - "compressible": false, - "extensions": ["x3db","x3dbz"] - }, - "model/x3d+fastinfoset": { - "source": "iana", - "extensions": ["x3db"] - }, - "model/x3d+vrml": { - "source": "apache", - "compressible": false, - "extensions": ["x3dv","x3dvz"] - }, - "model/x3d+xml": { - "source": "iana", - "compressible": true, - "extensions": ["x3d","x3dz"] - }, - "model/x3d-vrml": { - "source": "iana", - "extensions": ["x3dv"] - }, - "multipart/alternative": { - "source": "iana", - "compressible": false - }, - "multipart/appledouble": { - "source": "iana" - }, - "multipart/byteranges": { - "source": "iana" - }, - "multipart/digest": { - "source": "iana" - }, - "multipart/encrypted": { - "source": "iana", - "compressible": false - }, - "multipart/form-data": { - "source": "iana", - "compressible": false - }, - "multipart/header-set": { - "source": "iana" - }, - "multipart/mixed": { - "source": "iana" - }, - "multipart/multilingual": { - "source": "iana" - }, - "multipart/parallel": { - "source": "iana" - }, - "multipart/related": { - "source": "iana", - "compressible": false - }, - "multipart/report": { - "source": "iana" - }, - "multipart/signed": { - "source": "iana", - "compressible": false - }, - "multipart/vnd.bint.med-plus": { - "source": "iana" - }, - "multipart/voice-message": { - "source": "iana" - }, - "multipart/x-mixed-replace": { - "source": "iana" - }, - "text/1d-interleaved-parityfec": { - "source": "iana" - }, - "text/cache-manifest": { - "source": "iana", - "compressible": true, - "extensions": ["appcache","manifest"] - }, - "text/calendar": { - "source": "iana", - "extensions": ["ics","ifb"] - }, - "text/calender": { - "compressible": true - }, - "text/cmd": { - "compressible": true - }, - "text/coffeescript": { - "extensions": ["coffee","litcoffee"] - }, - "text/css": { - "source": "iana", - "charset": "UTF-8", - "compressible": true, - "extensions": ["css"] - }, - 
"text/csv": { - "source": "iana", - "compressible": true, - "extensions": ["csv"] - }, - "text/csv-schema": { - "source": "iana" - }, - "text/directory": { - "source": "iana" - }, - "text/dns": { - "source": "iana" - }, - "text/ecmascript": { - "source": "iana" - }, - "text/encaprtp": { - "source": "iana" - }, - "text/enriched": { - "source": "iana" - }, - "text/flexfec": { - "source": "iana" - }, - "text/fwdred": { - "source": "iana" - }, - "text/grammar-ref-list": { - "source": "iana" - }, - "text/html": { - "source": "iana", - "compressible": true, - "extensions": ["html","htm","shtml"] - }, - "text/jade": { - "extensions": ["jade"] - }, - "text/javascript": { - "source": "iana", - "compressible": true - }, - "text/jcr-cnd": { - "source": "iana" - }, - "text/jsx": { - "compressible": true, - "extensions": ["jsx"] - }, - "text/less": { - "compressible": true, - "extensions": ["less"] - }, - "text/markdown": { - "source": "iana", - "compressible": true, - "extensions": ["markdown","md"] - }, - "text/mathml": { - "source": "nginx", - "extensions": ["mml"] - }, - "text/mdx": { - "compressible": true, - "extensions": ["mdx"] - }, - "text/mizar": { - "source": "iana" - }, - "text/n3": { - "source": "iana", - "compressible": true, - "extensions": ["n3"] - }, - "text/parameters": { - "source": "iana" - }, - "text/parityfec": { - "source": "iana" - }, - "text/plain": { - "source": "iana", - "compressible": true, - "extensions": ["txt","text","conf","def","list","log","in","ini"] - }, - "text/provenance-notation": { - "source": "iana" - }, - "text/prs.fallenstein.rst": { - "source": "iana" - }, - "text/prs.lines.tag": { - "source": "iana", - "extensions": ["dsc"] - }, - "text/prs.prop.logic": { - "source": "iana" - }, - "text/raptorfec": { - "source": "iana" - }, - "text/red": { - "source": "iana" - }, - "text/rfc822-headers": { - "source": "iana" - }, - "text/richtext": { - "source": "iana", - "compressible": true, - "extensions": ["rtx"] - }, - "text/rtf": { - "source": "iana", - "compressible": true, - "extensions": ["rtf"] - }, - "text/rtp-enc-aescm128": { - "source": "iana" - }, - "text/rtploopback": { - "source": "iana" - }, - "text/rtx": { - "source": "iana" - }, - "text/sgml": { - "source": "iana", - "extensions": ["sgml","sgm"] - }, - "text/shex": { - "extensions": ["shex"] - }, - "text/slim": { - "extensions": ["slim","slm"] - }, - "text/strings": { - "source": "iana" - }, - "text/stylus": { - "extensions": ["stylus","styl"] - }, - "text/t140": { - "source": "iana" - }, - "text/tab-separated-values": { - "source": "iana", - "compressible": true, - "extensions": ["tsv"] - }, - "text/troff": { - "source": "iana", - "extensions": ["t","tr","roff","man","me","ms"] - }, - "text/turtle": { - "source": "iana", - "charset": "UTF-8", - "extensions": ["ttl"] - }, - "text/ulpfec": { - "source": "iana" - }, - "text/uri-list": { - "source": "iana", - "compressible": true, - "extensions": ["uri","uris","urls"] - }, - "text/vcard": { - "source": "iana", - "compressible": true, - "extensions": ["vcard"] - }, - "text/vnd.a": { - "source": "iana" - }, - "text/vnd.abc": { - "source": "iana" - }, - "text/vnd.ascii-art": { - "source": "iana" - }, - "text/vnd.curl": { - "source": "iana", - "extensions": ["curl"] - }, - "text/vnd.curl.dcurl": { - "source": "apache", - "extensions": ["dcurl"] - }, - "text/vnd.curl.mcurl": { - "source": "apache", - "extensions": ["mcurl"] - }, - "text/vnd.curl.scurl": { - "source": "apache", - "extensions": ["scurl"] - }, - "text/vnd.debian.copyright": { - "source": "iana" - }, 
- "text/vnd.dmclientscript": { - "source": "iana" - }, - "text/vnd.dvb.subtitle": { - "source": "iana", - "extensions": ["sub"] - }, - "text/vnd.esmertec.theme-descriptor": { - "source": "iana" - }, - "text/vnd.ficlab.flt": { - "source": "iana" - }, - "text/vnd.fly": { - "source": "iana", - "extensions": ["fly"] - }, - "text/vnd.fmi.flexstor": { - "source": "iana", - "extensions": ["flx"] - }, - "text/vnd.gml": { - "source": "iana" - }, - "text/vnd.graphviz": { - "source": "iana", - "extensions": ["gv"] - }, - "text/vnd.hgl": { - "source": "iana" - }, - "text/vnd.in3d.3dml": { - "source": "iana", - "extensions": ["3dml"] - }, - "text/vnd.in3d.spot": { - "source": "iana", - "extensions": ["spot"] - }, - "text/vnd.iptc.newsml": { - "source": "iana" - }, - "text/vnd.iptc.nitf": { - "source": "iana" - }, - "text/vnd.latex-z": { - "source": "iana" - }, - "text/vnd.motorola.reflex": { - "source": "iana" - }, - "text/vnd.ms-mediapackage": { - "source": "iana" - }, - "text/vnd.net2phone.commcenter.command": { - "source": "iana" - }, - "text/vnd.radisys.msml-basic-layout": { - "source": "iana" - }, - "text/vnd.senx.warpscript": { - "source": "iana" - }, - "text/vnd.si.uricatalogue": { - "source": "iana" - }, - "text/vnd.sosi": { - "source": "iana" - }, - "text/vnd.sun.j2me.app-descriptor": { - "source": "iana", - "extensions": ["jad"] - }, - "text/vnd.trolltech.linguist": { - "source": "iana" - }, - "text/vnd.wap.si": { - "source": "iana" - }, - "text/vnd.wap.sl": { - "source": "iana" - }, - "text/vnd.wap.wml": { - "source": "iana", - "extensions": ["wml"] - }, - "text/vnd.wap.wmlscript": { - "source": "iana", - "extensions": ["wmls"] - }, - "text/vtt": { - "source": "iana", - "charset": "UTF-8", - "compressible": true, - "extensions": ["vtt"] - }, - "text/x-asm": { - "source": "apache", - "extensions": ["s","asm"] - }, - "text/x-c": { - "source": "apache", - "extensions": ["c","cc","cxx","cpp","h","hh","dic"] - }, - "text/x-component": { - "source": "nginx", - "extensions": ["htc"] - }, - "text/x-fortran": { - "source": "apache", - "extensions": ["f","for","f77","f90"] - }, - "text/x-gwt-rpc": { - "compressible": true - }, - "text/x-handlebars-template": { - "extensions": ["hbs"] - }, - "text/x-java-source": { - "source": "apache", - "extensions": ["java"] - }, - "text/x-jquery-tmpl": { - "compressible": true - }, - "text/x-lua": { - "extensions": ["lua"] - }, - "text/x-markdown": { - "compressible": true, - "extensions": ["mkd"] - }, - "text/x-nfo": { - "source": "apache", - "extensions": ["nfo"] - }, - "text/x-opml": { - "source": "apache", - "extensions": ["opml"] - }, - "text/x-org": { - "compressible": true, - "extensions": ["org"] - }, - "text/x-pascal": { - "source": "apache", - "extensions": ["p","pas"] - }, - "text/x-processing": { - "compressible": true, - "extensions": ["pde"] - }, - "text/x-sass": { - "extensions": ["sass"] - }, - "text/x-scss": { - "extensions": ["scss"] - }, - "text/x-setext": { - "source": "apache", - "extensions": ["etx"] - }, - "text/x-sfv": { - "source": "apache", - "extensions": ["sfv"] - }, - "text/x-suse-ymp": { - "compressible": true, - "extensions": ["ymp"] - }, - "text/x-uuencode": { - "source": "apache", - "extensions": ["uu"] - }, - "text/x-vcalendar": { - "source": "apache", - "extensions": ["vcs"] - }, - "text/x-vcard": { - "source": "apache", - "extensions": ["vcf"] - }, - "text/xml": { - "source": "iana", - "compressible": true, - "extensions": ["xml"] - }, - "text/xml-external-parsed-entity": { - "source": "iana" - }, - "text/yaml": { - "extensions": 
["yaml","yml"] - }, - "video/1d-interleaved-parityfec": { - "source": "iana" - }, - "video/3gpp": { - "source": "iana", - "extensions": ["3gp","3gpp"] - }, - "video/3gpp-tt": { - "source": "iana" - }, - "video/3gpp2": { - "source": "iana", - "extensions": ["3g2"] - }, - "video/bmpeg": { - "source": "iana" - }, - "video/bt656": { - "source": "iana" - }, - "video/celb": { - "source": "iana" - }, - "video/dv": { - "source": "iana" - }, - "video/encaprtp": { - "source": "iana" - }, - "video/flexfec": { - "source": "iana" - }, - "video/h261": { - "source": "iana", - "extensions": ["h261"] - }, - "video/h263": { - "source": "iana", - "extensions": ["h263"] - }, - "video/h263-1998": { - "source": "iana" - }, - "video/h263-2000": { - "source": "iana" - }, - "video/h264": { - "source": "iana", - "extensions": ["h264"] - }, - "video/h264-rcdo": { - "source": "iana" - }, - "video/h264-svc": { - "source": "iana" - }, - "video/h265": { - "source": "iana" - }, - "video/iso.segment": { - "source": "iana" - }, - "video/jpeg": { - "source": "iana", - "extensions": ["jpgv"] - }, - "video/jpeg2000": { - "source": "iana" - }, - "video/jpm": { - "source": "apache", - "extensions": ["jpm","jpgm"] - }, - "video/mj2": { - "source": "iana", - "extensions": ["mj2","mjp2"] - }, - "video/mp1s": { - "source": "iana" - }, - "video/mp2p": { - "source": "iana" - }, - "video/mp2t": { - "source": "iana", - "extensions": ["ts"] - }, - "video/mp4": { - "source": "iana", - "compressible": false, - "extensions": ["mp4","mp4v","mpg4"] - }, - "video/mp4v-es": { - "source": "iana" - }, - "video/mpeg": { - "source": "iana", - "compressible": false, - "extensions": ["mpeg","mpg","mpe","m1v","m2v"] - }, - "video/mpeg4-generic": { - "source": "iana" - }, - "video/mpv": { - "source": "iana" - }, - "video/nv": { - "source": "iana" - }, - "video/ogg": { - "source": "iana", - "compressible": false, - "extensions": ["ogv"] - }, - "video/parityfec": { - "source": "iana" - }, - "video/pointer": { - "source": "iana" - }, - "video/quicktime": { - "source": "iana", - "compressible": false, - "extensions": ["qt","mov"] - }, - "video/raptorfec": { - "source": "iana" - }, - "video/raw": { - "source": "iana" - }, - "video/rtp-enc-aescm128": { - "source": "iana" - }, - "video/rtploopback": { - "source": "iana" - }, - "video/rtx": { - "source": "iana" - }, - "video/smpte291": { - "source": "iana" - }, - "video/smpte292m": { - "source": "iana" - }, - "video/ulpfec": { - "source": "iana" - }, - "video/vc1": { - "source": "iana" - }, - "video/vc2": { - "source": "iana" - }, - "video/vnd.cctv": { - "source": "iana" - }, - "video/vnd.dece.hd": { - "source": "iana", - "extensions": ["uvh","uvvh"] - }, - "video/vnd.dece.mobile": { - "source": "iana", - "extensions": ["uvm","uvvm"] - }, - "video/vnd.dece.mp4": { - "source": "iana" - }, - "video/vnd.dece.pd": { - "source": "iana", - "extensions": ["uvp","uvvp"] - }, - "video/vnd.dece.sd": { - "source": "iana", - "extensions": ["uvs","uvvs"] - }, - "video/vnd.dece.video": { - "source": "iana", - "extensions": ["uvv","uvvv"] - }, - "video/vnd.directv.mpeg": { - "source": "iana" - }, - "video/vnd.directv.mpeg-tts": { - "source": "iana" - }, - "video/vnd.dlna.mpeg-tts": { - "source": "iana" - }, - "video/vnd.dvb.file": { - "source": "iana", - "extensions": ["dvb"] - }, - "video/vnd.fvt": { - "source": "iana", - "extensions": ["fvt"] - }, - "video/vnd.hns.video": { - "source": "iana" - }, - "video/vnd.iptvforum.1dparityfec-1010": { - "source": "iana" - }, - "video/vnd.iptvforum.1dparityfec-2005": { - "source": 
"iana" - }, - "video/vnd.iptvforum.2dparityfec-1010": { - "source": "iana" - }, - "video/vnd.iptvforum.2dparityfec-2005": { - "source": "iana" - }, - "video/vnd.iptvforum.ttsavc": { - "source": "iana" - }, - "video/vnd.iptvforum.ttsmpeg2": { - "source": "iana" - }, - "video/vnd.motorola.video": { - "source": "iana" - }, - "video/vnd.motorola.videop": { - "source": "iana" - }, - "video/vnd.mpegurl": { - "source": "iana", - "extensions": ["mxu","m4u"] - }, - "video/vnd.ms-playready.media.pyv": { - "source": "iana", - "extensions": ["pyv"] - }, - "video/vnd.nokia.interleaved-multimedia": { - "source": "iana" - }, - "video/vnd.nokia.mp4vr": { - "source": "iana" - }, - "video/vnd.nokia.videovoip": { - "source": "iana" - }, - "video/vnd.objectvideo": { - "source": "iana" - }, - "video/vnd.radgamettools.bink": { - "source": "iana" - }, - "video/vnd.radgamettools.smacker": { - "source": "iana" - }, - "video/vnd.sealed.mpeg1": { - "source": "iana" - }, - "video/vnd.sealed.mpeg4": { - "source": "iana" - }, - "video/vnd.sealed.swf": { - "source": "iana" - }, - "video/vnd.sealedmedia.softseal.mov": { - "source": "iana" - }, - "video/vnd.uvvu.mp4": { - "source": "iana", - "extensions": ["uvu","uvvu"] - }, - "video/vnd.vivo": { - "source": "iana", - "extensions": ["viv"] - }, - "video/vnd.youtube.yt": { - "source": "iana" - }, - "video/vp8": { - "source": "iana" - }, - "video/webm": { - "source": "apache", - "compressible": false, - "extensions": ["webm"] - }, - "video/x-f4v": { - "source": "apache", - "extensions": ["f4v"] - }, - "video/x-fli": { - "source": "apache", - "extensions": ["fli"] - }, - "video/x-flv": { - "source": "apache", - "compressible": false, - "extensions": ["flv"] - }, - "video/x-m4v": { - "source": "apache", - "extensions": ["m4v"] - }, - "video/x-matroska": { - "source": "apache", - "compressible": false, - "extensions": ["mkv","mk3d","mks"] - }, - "video/x-mng": { - "source": "apache", - "extensions": ["mng"] - }, - "video/x-ms-asf": { - "source": "apache", - "extensions": ["asf","asx"] - }, - "video/x-ms-vob": { - "source": "apache", - "extensions": ["vob"] - }, - "video/x-ms-wm": { - "source": "apache", - "extensions": ["wm"] - }, - "video/x-ms-wmv": { - "source": "apache", - "compressible": false, - "extensions": ["wmv"] - }, - "video/x-ms-wmx": { - "source": "apache", - "extensions": ["wmx"] - }, - "video/x-ms-wvx": { - "source": "apache", - "extensions": ["wvx"] - }, - "video/x-msvideo": { - "source": "apache", - "extensions": ["avi"] - }, - "video/x-sgi-movie": { - "source": "apache", - "extensions": ["movie"] - }, - "video/x-smv": { - "source": "apache", - "extensions": ["smv"] - }, - "x-conference/x-cooltalk": { - "source": "apache", - "extensions": ["ice"] - }, - "x-shader/x-fragment": { - "compressible": true - }, - "x-shader/x-vertex": { - "compressible": true - } -} diff --git a/node_modules/mime-db/index.js b/node_modules/mime-db/index.js deleted file mode 100644 index 551031f..0000000 --- a/node_modules/mime-db/index.js +++ /dev/null @@ -1,11 +0,0 @@ -/*! - * mime-db - * Copyright(c) 2014 Jonathan Ong - * MIT Licensed - */ - -/** - * Module exports. 
- */ - -module.exports = require('./db.json') diff --git a/node_modules/mime-db/package.json b/node_modules/mime-db/package.json deleted file mode 100644 index 5abe402..0000000 --- a/node_modules/mime-db/package.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "_from": "mime-db@1.43.0", - "_id": "mime-db@1.43.0", - "_inBundle": false, - "_integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==", - "_location": "/mime-db", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "mime-db@1.43.0", - "name": "mime-db", - "escapedName": "mime-db", - "rawSpec": "1.43.0", - "saveSpec": null, - "fetchSpec": "1.43.0" - }, - "_requiredBy": [ - "/mime-types" - ], - "_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", - "_shasum": "0a12e0502650e473d735535050e7c8f4eb4fae58", - "_spec": "mime-db@1.43.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/mime-types", - "bugs": { - "url": "https://github.com/jshttp/mime-db/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Douglas Christopher Wilson", - "email": "doug@somethingdoug.com" - }, - { - "name": "Jonathan Ong", - "email": "me@jongleberry.com", - "url": "http://jongleberry.com" - }, - { - "name": "Robert Kieffer", - "email": "robert@broofa.com", - "url": "http://github.com/broofa" - } - ], - "deprecated": false, - "description": "Media Type Database", - "devDependencies": { - "bluebird": "3.7.2", - "co": "4.6.0", - "cogent": "1.0.1", - "csv-parse": "4.8.3", - "eslint": "6.8.0", - "eslint-config-standard": "14.1.0", - "eslint-plugin-import": "2.19.1", - "eslint-plugin-node": "11.0.0", - "eslint-plugin-promise": "4.2.1", - "eslint-plugin-standard": "4.0.1", - "gnode": "0.1.2", - "mocha": "7.0.0", - "nyc": "15.0.0", - "raw-body": "2.4.1", - "stream-to-array": "2.3.0" - }, - "engines": { - "node": ">= 0.6" - }, - "files": [ - "HISTORY.md", - "LICENSE", - "README.md", - "db.json", - "index.js" - ], - "homepage": "https://github.com/jshttp/mime-db#readme", - "keywords": [ - "mime", - "db", - "type", - "types", - "database", - "charset", - "charsets" - ], - "license": "MIT", - "name": "mime-db", - "repository": { - "type": "git", - "url": "git+https://github.com/jshttp/mime-db.git" - }, - "scripts": { - "build": "node scripts/build", - "fetch": "node scripts/fetch-apache && gnode scripts/fetch-iana && node scripts/fetch-nginx", - "lint": "eslint .", - "test": "mocha --reporter spec --bail --check-leaks test/", - "test-cov": "nyc --reporter=html --reporter=text npm test", - "test-travis": "nyc --reporter=text npm test", - "update": "npm run fetch && npm run build", - "version": "node scripts/version-history.js && git add HISTORY.md" - }, - "version": "1.43.0" -} diff --git a/node_modules/mime-types/HISTORY.md b/node_modules/mime-types/HISTORY.md deleted file mode 100644 index db3b311..0000000 --- a/node_modules/mime-types/HISTORY.md +++ /dev/null @@ -1,325 +0,0 @@ -2.1.26 / 2020-01-05 -=================== - - * deps: mime-db@1.43.0 - - Add `application/x-keepass2` with extension `.kdbx` - - Add extension `.mxmf` to `audio/mobile-xmf` - - Add extensions from IANA for `application/*+xml` types - - Add new upstream MIME types - -2.1.25 / 2019-11-12 -=================== - - * deps: mime-db@1.42.0 - - Add new upstream MIME types - - Add `application/toml` with extension `.toml` - - Add `image/vnd.ms-dds` with extension `.dds` - -2.1.24 / 2019-04-20 -=================== - - * deps: mime-db@1.40.0 - - Add extensions 
from IANA for `model/*` types - - Add `text/mdx` with extension `.mdx` - -2.1.23 / 2019-04-17 -=================== - - * deps: mime-db@~1.39.0 - - Add extensions `.siv` and `.sieve` to `application/sieve` - - Add new upstream MIME types - -2.1.22 / 2019-02-14 -=================== - - * deps: mime-db@~1.38.0 - - Add extension `.nq` to `application/n-quads` - - Add extension `.nt` to `application/n-triples` - - Add new upstream MIME types - - Mark `text/less` as compressible - -2.1.21 / 2018-10-19 -=================== - - * deps: mime-db@~1.37.0 - - Add extensions to HEIC image types - - Add new upstream MIME types - -2.1.20 / 2018-08-26 -=================== - - * deps: mime-db@~1.36.0 - - Add Apple file extensions from IANA - - Add extensions from IANA for `image/*` types - - Add new upstream MIME types - -2.1.19 / 2018-07-17 -=================== - - * deps: mime-db@~1.35.0 - - Add extension `.csl` to `application/vnd.citationstyles.style+xml` - - Add extension `.es` to `application/ecmascript` - - Add extension `.owl` to `application/rdf+xml` - - Add new upstream MIME types - - Add UTF-8 as default charset for `text/turtle` - -2.1.18 / 2018-02-16 -=================== - - * deps: mime-db@~1.33.0 - - Add `application/raml+yaml` with extension `.raml` - - Add `application/wasm` with extension `.wasm` - - Add `text/shex` with extension `.shex` - - Add extensions for JPEG-2000 images - - Add extensions from IANA for `message/*` types - - Add new upstream MIME types - - Update font MIME types - - Update `text/hjson` to registered `application/hjson` - -2.1.17 / 2017-09-01 -=================== - - * deps: mime-db@~1.30.0 - - Add `application/vnd.ms-outlook` - - Add `application/x-arj` - - Add extension `.mjs` to `application/javascript` - - Add glTF types and extensions - - Add new upstream MIME types - - Add `text/x-org` - - Add VirtualBox MIME types - - Fix `source` records for `video/*` types that are IANA - - Update `font/opentype` to registered `font/otf` - -2.1.16 / 2017-07-24 -=================== - - * deps: mime-db@~1.29.0 - - Add `application/fido.trusted-apps+json` - - Add extension `.wadl` to `application/vnd.sun.wadl+xml` - - Add extension `.gz` to `application/gzip` - - Add new upstream MIME types - - Update extensions `.md` and `.markdown` to be `text/markdown` - -2.1.15 / 2017-03-23 -=================== - - * deps: mime-db@~1.27.0 - - Add new mime types - - Add `image/apng` - -2.1.14 / 2017-01-14 -=================== - - * deps: mime-db@~1.26.0 - - Add new mime types - -2.1.13 / 2016-11-18 -=================== - - * deps: mime-db@~1.25.0 - - Add new mime types - -2.1.12 / 2016-09-18 -=================== - - * deps: mime-db@~1.24.0 - - Add new mime types - - Add `audio/mp3` - -2.1.11 / 2016-05-01 -=================== - - * deps: mime-db@~1.23.0 - - Add new mime types - -2.1.10 / 2016-02-15 -=================== - - * deps: mime-db@~1.22.0 - - Add new mime types - - Fix extension of `application/dash+xml` - - Update primary extension for `audio/mp4` - -2.1.9 / 2016-01-06 -================== - - * deps: mime-db@~1.21.0 - - Add new mime types - -2.1.8 / 2015-11-30 -================== - - * deps: mime-db@~1.20.0 - - Add new mime types - -2.1.7 / 2015-09-20 -================== - - * deps: mime-db@~1.19.0 - - Add new mime types - -2.1.6 / 2015-09-03 -================== - - * deps: mime-db@~1.18.0 - - Add new mime types - -2.1.5 / 2015-08-20 -================== - - * deps: mime-db@~1.17.0 - - Add new mime types - -2.1.4 / 2015-07-30 -================== - - * deps: mime-db@~1.16.0 - - Add 
new mime types - -2.1.3 / 2015-07-13 -================== - - * deps: mime-db@~1.15.0 - - Add new mime types - -2.1.2 / 2015-06-25 -================== - - * deps: mime-db@~1.14.0 - - Add new mime types - -2.1.1 / 2015-06-08 -================== - - * perf: fix deopt during mapping - -2.1.0 / 2015-06-07 -================== - - * Fix incorrectly treating extension-less file name as extension - - i.e. `'path/to/json'` will no longer return `application/json` - * Fix `.charset(type)` to accept parameters - * Fix `.charset(type)` to match case-insensitive - * Improve generation of extension to MIME mapping - * Refactor internals for readability and no argument reassignment - * Prefer `application/*` MIME types from the same source - * Prefer any type over `application/octet-stream` - * deps: mime-db@~1.13.0 - - Add nginx as a source - - Add new mime types - -2.0.14 / 2015-06-06 -=================== - - * deps: mime-db@~1.12.0 - - Add new mime types - -2.0.13 / 2015-05-31 -=================== - - * deps: mime-db@~1.11.0 - - Add new mime types - -2.0.12 / 2015-05-19 -=================== - - * deps: mime-db@~1.10.0 - - Add new mime types - -2.0.11 / 2015-05-05 -=================== - - * deps: mime-db@~1.9.1 - - Add new mime types - -2.0.10 / 2015-03-13 -=================== - - * deps: mime-db@~1.8.0 - - Add new mime types - -2.0.9 / 2015-02-09 -================== - - * deps: mime-db@~1.7.0 - - Add new mime types - - Community extensions ownership transferred from `node-mime` - -2.0.8 / 2015-01-29 -================== - - * deps: mime-db@~1.6.0 - - Add new mime types - -2.0.7 / 2014-12-30 -================== - - * deps: mime-db@~1.5.0 - - Add new mime types - - Fix various invalid MIME type entries - -2.0.6 / 2014-12-30 -================== - - * deps: mime-db@~1.4.0 - - Add new mime types - - Fix various invalid MIME type entries - - Remove example template MIME types - -2.0.5 / 2014-12-29 -================== - - * deps: mime-db@~1.3.1 - - Fix missing extensions - -2.0.4 / 2014-12-10 -================== - - * deps: mime-db@~1.3.0 - - Add new mime types - -2.0.3 / 2014-11-09 -================== - - * deps: mime-db@~1.2.0 - - Add new mime types - -2.0.2 / 2014-09-28 -================== - - * deps: mime-db@~1.1.0 - - Add new mime types - - Add additional compressible - - Update charsets - -2.0.1 / 2014-09-07 -================== - - * Support Node.js 0.6 - -2.0.0 / 2014-09-02 -================== - - * Use `mime-db` - * Remove `.define()` - -1.0.2 / 2014-08-04 -================== - - * Set charset=utf-8 for `text/javascript` - -1.0.1 / 2014-06-24 -================== - - * Add `text/jsx` type - -1.0.0 / 2014-05-12 -================== - - * Return `false` for unknown types - * Set charset=utf-8 for `application/json` - -0.1.0 / 2014-05-02 -================== - - * Initial release diff --git a/node_modules/mime-types/LICENSE b/node_modules/mime-types/LICENSE deleted file mode 100644 index 0616607..0000000 --- a/node_modules/mime-types/LICENSE +++ /dev/null @@ -1,23 +0,0 @@ -(The MIT License) - -Copyright (c) 2014 Jonathan Ong -Copyright (c) 2015 Douglas Christopher Wilson - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - 
-The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-types/README.md b/node_modules/mime-types/README.md deleted file mode 100644 index 1dbef2b..0000000 --- a/node_modules/mime-types/README.md +++ /dev/null @@ -1,113 +0,0 @@ -# mime-types - -[![NPM Version][npm-version-image]][npm-url] -[![NPM Downloads][npm-downloads-image]][npm-url] -[![Node.js Version][node-version-image]][node-version-url] -[![Build Status][travis-image]][travis-url] -[![Test Coverage][coveralls-image]][coveralls-url] - -The ultimate javascript content-type utility. - -Similar to [the `mime@1.x` module](https://www.npmjs.com/package/mime), except: - -- __No fallbacks.__ Instead of naively returning the first available type, - `mime-types` simply returns `false`, so do - `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. -- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. -- No `.define()` functionality -- Bug fixes for `.lookup(path)` - -Otherwise, the API is compatible with `mime` 1.x. - -## Install - -This is a [Node.js](https://nodejs.org/en/) module available through the -[npm registry](https://www.npmjs.com/). Installation is done using the -[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): - -```sh -$ npm install mime-types -``` - -## Adding Types - -All mime types are based on [mime-db](https://www.npmjs.com/package/mime-db), -so open a PR there if you'd like to add mime types. - -## API - -```js -var mime = require('mime-types') -``` - -All functions return `false` if input is invalid or not found. - -### mime.lookup(path) - -Lookup the content-type associated with a file. - -```js -mime.lookup('json') // 'application/json' -mime.lookup('.md') // 'text/markdown' -mime.lookup('file.html') // 'text/html' -mime.lookup('folder/file.js') // 'application/javascript' -mime.lookup('folder/.htaccess') // false - -mime.lookup('cats') // false -``` - -### mime.contentType(type) - -Create a full content-type header given a content-type or extension. -When given an extension, `mime.lookup` is used to get the matching -content-type, otherwise the given content-type is used. Then if the -content-type does not already have a `charset` parameter, `mime.charset` -is used to get the default charset and add to the returned content-type. - -```js -mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' -mime.contentType('file.json') // 'application/json; charset=utf-8' -mime.contentType('text/html') // 'text/html; charset=utf-8' -mime.contentType('text/html; charset=iso-8859-1') // 'text/html; charset=iso-8859-1' - -// from a full path -mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' -``` - -### mime.extension(type) - -Get the default extension for a content-type. - -```js -mime.extension('application/octet-stream') // 'bin' -``` - -### mime.charset(type) - -Lookup the implied default charset of a content-type. 
- -```js -mime.charset('text/markdown') // 'UTF-8' -``` - -### var type = mime.types[extension] - -A map of content-types by extension. - -### [extensions...] = mime.extensions[type] - -A map of extensions by content-type. - -## License - -[MIT](LICENSE) - -[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master -[coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master -[node-version-image]: https://badgen.net/npm/node/mime-types -[node-version-url]: https://nodejs.org/en/download -[npm-downloads-image]: https://badgen.net/npm/dm/mime-types -[npm-url]: https://npmjs.org/package/mime-types -[npm-version-image]: https://badgen.net/npm/v/mime-types -[travis-image]: https://badgen.net/travis/jshttp/mime-types/master -[travis-url]: https://travis-ci.org/jshttp/mime-types diff --git a/node_modules/mime-types/index.js b/node_modules/mime-types/index.js deleted file mode 100644 index b9f34d5..0000000 --- a/node_modules/mime-types/index.js +++ /dev/null @@ -1,188 +0,0 @@ -/*! - * mime-types - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ - -'use strict' - -/** - * Module dependencies. - * @private - */ - -var db = require('mime-db') -var extname = require('path').extname - -/** - * Module variables. - * @private - */ - -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ -var TEXT_TYPE_REGEXP = /^text\//i - -/** - * Module exports. - * @public - */ - -exports.charset = charset -exports.charsets = { lookup: charset } -exports.contentType = contentType -exports.extension = extension -exports.extensions = Object.create(null) -exports.lookup = lookup -exports.types = Object.create(null) - -// Populate the extensions/types maps -populateMaps(exports.extensions, exports.types) - -/** - * Get the default charset for a MIME type. - * - * @param {string} type - * @return {boolean|string} - */ - -function charset (type) { - if (!type || typeof type !== 'string') { - return false - } - - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && db[match[1].toLowerCase()] - - if (mime && mime.charset) { - return mime.charset - } - - // default text/* to utf-8 - if (match && TEXT_TYPE_REGEXP.test(match[1])) { - return 'UTF-8' - } - - return false -} - -/** - * Create a full Content-Type header given a MIME type or extension. - * - * @param {string} str - * @return {boolean|string} - */ - -function contentType (str) { - // TODO: should this even be in this module? - if (!str || typeof str !== 'string') { - return false - } - - var mime = str.indexOf('/') === -1 - ? exports.lookup(str) - : str - - if (!mime) { - return false - } - - // TODO: use content-type or other module - if (mime.indexOf('charset') === -1) { - var charset = exports.charset(mime) - if (charset) mime += '; charset=' + charset.toLowerCase() - } - - return mime -} - -/** - * Get the default extension for a MIME type. - * - * @param {string} type - * @return {boolean|string} - */ - -function extension (type) { - if (!type || typeof type !== 'string') { - return false - } - - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - - // get extensions - var exts = match && exports.extensions[match[1].toLowerCase()] - - if (!exts || !exts.length) { - return false - } - - return exts[0] -} - -/** - * Lookup the MIME type for a file path/extension. 
- * - * @param {string} path - * @return {boolean|string} - */ - -function lookup (path) { - if (!path || typeof path !== 'string') { - return false - } - - // get the extension ("ext" or ".ext" or full path) - var extension = extname('x.' + path) - .toLowerCase() - .substr(1) - - if (!extension) { - return false - } - - return exports.types[extension] || false -} - -/** - * Populate the extensions and types maps. - * @private - */ - -function populateMaps (extensions, types) { - // source preference (least -> most) - var preference = ['nginx', 'apache', undefined, 'iana'] - - Object.keys(db).forEach(function forEachMimeType (type) { - var mime = db[type] - var exts = mime.extensions - - if (!exts || !exts.length) { - return - } - - // mime -> extensions - extensions[type] = exts - - // extension -> mime - for (var i = 0; i < exts.length; i++) { - var extension = exts[i] - - if (types[extension]) { - var from = preference.indexOf(db[types[extension]].source) - var to = preference.indexOf(mime.source) - - if (types[extension] !== 'application/octet-stream' && - (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { - // skip the remapping - continue - } - } - - // set the extension -> mime - types[extension] = type - } - }) -} diff --git a/node_modules/mime-types/package.json b/node_modules/mime-types/package.json deleted file mode 100644 index 4fac98b..0000000 --- a/node_modules/mime-types/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "_from": "mime-types@^2.1.12", - "_id": "mime-types@2.1.26", - "_inBundle": false, - "_integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", - "_location": "/mime-types", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "mime-types@^2.1.12", - "name": "mime-types", - "escapedName": "mime-types", - "rawSpec": "^2.1.12", - "saveSpec": null, - "fetchSpec": "^2.1.12" - }, - "_requiredBy": [ - "/form-data", - "/request", - "/request/form-data" - ], - "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", - "_shasum": "9c921fc09b7e149a65dfdc0da4d20997200b0a06", - "_spec": "mime-types@^2.1.12", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/form-data", - "bugs": { - "url": "https://github.com/jshttp/mime-types/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Douglas Christopher Wilson", - "email": "doug@somethingdoug.com" - }, - { - "name": "Jeremiah Senkpiel", - "email": "fishrock123@rocketmail.com", - "url": "https://searchbeam.jit.su" - }, - { - "name": "Jonathan Ong", - "email": "me@jongleberry.com", - "url": "http://jongleberry.com" - } - ], - "dependencies": { - "mime-db": "1.43.0" - }, - "deprecated": false, - "description": "The ultimate javascript content-type utility.", - "devDependencies": { - "eslint": "6.8.0", - "eslint-config-standard": "14.1.0", - "eslint-plugin-import": "2.19.1", - "eslint-plugin-node": "11.0.0", - "eslint-plugin-promise": "4.2.1", - "eslint-plugin-standard": "4.0.1", - "mocha": "7.0.0", - "nyc": "15.0.0" - }, - "engines": { - "node": ">= 0.6" - }, - "files": [ - "HISTORY.md", - "LICENSE", - "index.js" - ], - "homepage": "https://github.com/jshttp/mime-types#readme", - "keywords": [ - "mime", - "types" - ], - "license": "MIT", - "name": "mime-types", - "repository": { - "type": "git", - "url": "git+https://github.com/jshttp/mime-types.git" - }, - "scripts": { - "lint": "eslint .", - "test": "mocha --reporter spec test/test.js", - 
"test-cov": "nyc --reporter=html --reporter=text npm test", - "test-travis": "nyc --reporter=text npm test" - }, - "version": "2.1.26" -} diff --git a/node_modules/ms/index.js b/node_modules/ms/index.js deleted file mode 100644 index c4498bc..0000000 --- a/node_modules/ms/index.js +++ /dev/null @@ -1,162 +0,0 @@ -/** - * Helpers. - */ - -var s = 1000; -var m = s * 60; -var h = m * 60; -var d = h * 24; -var w = d * 7; -var y = d * 365.25; - -/** - * Parse or format the given `val`. - * - * Options: - * - * - `long` verbose formatting [false] - * - * @param {String|Number} val - * @param {Object} [options] - * @throws {Error} throw an error if val is not a non-empty string or a number - * @return {String|Number} - * @api public - */ - -module.exports = function(val, options) { - options = options || {}; - var type = typeof val; - if (type === 'string' && val.length > 0) { - return parse(val); - } else if (type === 'number' && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); - } - throw new Error( - 'val is not a non-empty string or a valid number. val=' + - JSON.stringify(val) - ); -}; - -/** - * Parse the given `str` and return milliseconds. - * - * @param {String} str - * @return {Number} - * @api private - */ - -function parse(str) { - str = String(str); - if (str.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type = (match[2] || 'ms').toLowerCase(); - switch (type) { - case 'years': - case 'year': - case 'yrs': - case 'yr': - case 'y': - return n * y; - case 'weeks': - case 'week': - case 'w': - return n * w; - case 'days': - case 'day': - case 'd': - return n * d; - case 'hours': - case 'hour': - case 'hrs': - case 'hr': - case 'h': - return n * h; - case 'minutes': - case 'minute': - case 'mins': - case 'min': - case 'm': - return n * m; - case 'seconds': - case 'second': - case 'secs': - case 'sec': - case 's': - return n * s; - case 'milliseconds': - case 'millisecond': - case 'msecs': - case 'msec': - case 'ms': - return n; - default: - return undefined; - } -} - -/** - * Short format for `ms`. - * - * @param {Number} ms - * @return {String} - * @api private - */ - -function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + 'd'; - } - if (msAbs >= h) { - return Math.round(ms / h) + 'h'; - } - if (msAbs >= m) { - return Math.round(ms / m) + 'm'; - } - if (msAbs >= s) { - return Math.round(ms / s) + 's'; - } - return ms + 'ms'; -} - -/** - * Long format for `ms`. - * - * @param {Number} ms - * @return {String} - * @api private - */ - -function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, 'day'); - } - if (msAbs >= h) { - return plural(ms, msAbs, h, 'hour'); - } - if (msAbs >= m) { - return plural(ms, msAbs, m, 'minute'); - } - if (msAbs >= s) { - return plural(ms, msAbs, s, 'second'); - } - return ms + ' ms'; -} - -/** - * Pluralization helper. - */ - -function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); -} diff --git a/node_modules/ms/license.md b/node_modules/ms/license.md deleted file mode 100644 index 69b6125..0000000 --- a/node_modules/ms/license.md +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Zeit, Inc. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/ms/package.json b/node_modules/ms/package.json deleted file mode 100644 index b9546b5..0000000 --- a/node_modules/ms/package.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "_from": "ms@^2.1.2", - "_id": "ms@2.1.2", - "_inBundle": false, - "_integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "_location": "/ms", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "ms@^2.1.2", - "name": "ms", - "escapedName": "ms", - "rawSpec": "^2.1.2", - "saveSpec": null, - "fetchSpec": "^2.1.2" - }, - "_requiredBy": [ - "/", - "/cpx2/debug", - "/dependency-check/debug", - "/eslint/debug" - ], - "_resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "_shasum": "d09d1f357b443f493382a8eb3ccd183872ae6009", - "_spec": "ms@^2.1.2", - "_where": "/Users/bret/repos/deploy-to-neocities", - "bugs": { - "url": "https://github.com/zeit/ms/issues" - }, - "bundleDependencies": false, - "deprecated": false, - "description": "Tiny millisecond conversion utility", - "devDependencies": { - "eslint": "4.12.1", - "expect.js": "0.3.1", - "husky": "0.14.3", - "lint-staged": "5.0.0", - "mocha": "4.0.1" - }, - "eslintConfig": { - "extends": "eslint:recommended", - "env": { - "node": true, - "es6": true - } - }, - "files": [ - "index.js" - ], - "homepage": "https://github.com/zeit/ms#readme", - "license": "MIT", - "lint-staged": { - "*.js": [ - "npm run lint", - "prettier --single-quote --write", - "git add" - ] - }, - "main": "./index", - "name": "ms", - "repository": { - "type": "git", - "url": "git+https://github.com/zeit/ms.git" - }, - "scripts": { - "lint": "eslint lib/* bin/*", - "precommit": "lint-staged", - "test": "mocha tests.js" - }, - "version": "2.1.2" -} diff --git a/node_modules/ms/readme.md b/node_modules/ms/readme.md deleted file mode 100644 index 9a1996b..0000000 --- a/node_modules/ms/readme.md +++ /dev/null @@ -1,60 +0,0 @@ -# ms - -[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) -[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) - -Use this package to easily convert various time formats to milliseconds. 
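The readme examples removed below cover the happy path; the `index.js` removed above also shows that `ms` throws when given anything other than a non-empty string or a finite number. A minimal sketch of both behaviours, assuming `ms@2.1.2` is installed:

```js
// Minimal sketch, assuming ms@2.1.2 is installed.
const ms = require('ms')

ms('2 days')                  // 172800000 (string -> milliseconds)
ms(172800000)                 // '2d'      (number -> short format)
ms(172800000, { long: true }) // '2 days'  (number -> long format)

// Anything else throws, per the type check at the top of index.js above.
try {
  ms({})
} catch (err) {
  console.log(err.message) // 'val is not a non-empty string or a valid number. val={}'
}
```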
- -## Examples - -```js -ms('2 days') // 172800000 -ms('1d') // 86400000 -ms('10h') // 36000000 -ms('2.5 hrs') // 9000000 -ms('2h') // 7200000 -ms('1m') // 60000 -ms('5s') // 5000 -ms('1y') // 31557600000 -ms('100') // 100 -ms('-3 days') // -259200000 -ms('-1h') // -3600000 -ms('-200') // -200 -``` - -### Convert from Milliseconds - -```js -ms(60000) // "1m" -ms(2 * 60000) // "2m" -ms(-3 * 60000) // "-3m" -ms(ms('10 hours')) // "10h" -``` - -### Time Format Written-Out - -```js -ms(60000, { long: true }) // "1 minute" -ms(2 * 60000, { long: true }) // "2 minutes" -ms(-3 * 60000, { long: true }) // "-3 minutes" -ms(ms('10 hours'), { long: true }) // "10 hours" -``` - -## Features - -- Works both in [Node.js](https://nodejs.org) and in the browser -- If a number is supplied to `ms`, a string with a unit is returned -- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) -- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned - -## Related Packages - -- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. - -## Caught a Bug? - -1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device -2. Link the package to the global module directory: `npm link` -3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! - -As always, you can run the tests using: `npm test` diff --git a/node_modules/nanoassert/.travis.yml b/node_modules/nanoassert/.travis.yml deleted file mode 100644 index c700def..0000000 --- a/node_modules/nanoassert/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ -sudo: false -language: node_js -node_js: - - lts/* -jobs: - include: - - stage: npm release - node_js: node - script: echo "Deploying to npm ..." 
- deploy: - provider: npm - email: - secure: "Xxe2IEoT27tWFCCN3suwAtioeWSy5IhTEqqAIy6I6MbFAbqq4npU0lQ6mL5+kI5H6S2luFZwf1Eex1lXNfNAdGFI0sSnjOcKyw7HUsU/D7fNpQmtnnlSAJmz0z7qtWGzrNM/ihTFxAo5CYRJq2uemZ7tZTah0ePcXfiQxK3ZcnI2kS5yR+bQqZQVMy0yT0zHhTGoTsWaF3yAaZrlqLK5VNwIMahmRbz+BTtzNPgB9c9RA5tDla+GPevF3jaFw+Fwuv8M38idRERzJMJ7nYnBOC7XXssk25d+2ErsX61wqAxrXTaPmV9Mg6Njp5hLdl6PA2wMOSSHMfbqrRmjB2yvKzqvcB/Z34lhpd7/0PT2iWVA78aSGwLuTDIxcyeRNdYLMhVrMt/mPHTtzw2xckdg1NNkXEwyXowTLqMEB+8AnRt7Vuy/wOgw60l1AvYB3VJsmwkOR/bL3p8Q1weRj4hYJgB0W4YWeAh7d0NcvE4zwML849n5t37k1tHtcOY0ZVqea1ENJfja1IjFsNN4ki99y8wCm+ba5d+AJaqn0+ChluZfnQTqSqFHxfEEEy8v+TGXl+F8j6YT9iYfnyPHwYLjXx4GcFzlhSDiDF+Dxr155TU2XsSVB9zrL6Mv8TzOiazgosBFZO2Jz3EnWjEr21oFuKPHyb0FJ0nC5UMLHx3youU=" - api_key: # 1ed6 - secure: "zX7inYgOYTPpbMMU2BHPcGwbuPRadcFUfgANuvFk81ZWTgmoZkGSl/YUbacQmpg8QcuSvw6m5Gpv518hNKGHIlvKgf02PADJNoMoHNK+vpCzwhHxAEddAYO7Up18vNPs+erDlnDBa9LhnTbNeGh9bUvb1ylDDlvdsNCbDIU+dFsTxJCqG9jma0P5UJIbj6S+oMB+pVYZ64e82k7H3PE4PFctUPuahMcKhuO/rt9t/62IXzC7f1CIPwpm6bOfhkwtpO9UswyLkpscERIEWjLY5GgwIi/t4Zp3wV1J0EjczhA1+ZnUvCM9HelSAiWxqQrkCZSEFDI3Q9x9ZOgBCjwBt7BcNOb68UMASYFpVJZ4iVu+BmdhKPpSYqb+fIxxSFJuXOmND2wSNkxM/ETKkYRGu3rG9g52TMegtYYZXfD/oYLtozfdBVtCymjZ9Pt4Bw/IcpmcuS8gfWJqziXIlD+7hOaZHAwcTg7zpSQvnUN0pxcumztWCgSSUpIWffn2YdQRuM3ih7Za6Qeve7ZkuKdi6uzvuoLB8WXt6xy7X+KIG7Ng3LTvCWvIkBuOXNAV9sRyF5BNwm0QbVMAwYO5MeK2acUxOFxLFNQLEkl+2pKHWcDPAXFuX0sgRfCc336cCUAauN7B83IAScHGx50BjG4ldRPOPlXWQawRDcsBQm5h7W0=" - on: - tags: true - node: node diff --git a/node_modules/nanoassert/LICENSE b/node_modules/nanoassert/LICENSE deleted file mode 100644 index b45703b..0000000 --- a/node_modules/nanoassert/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2017, Emil Bay - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/nanoassert/README.md b/node_modules/nanoassert/README.md deleted file mode 100644 index 33c0bb1..0000000 --- a/node_modules/nanoassert/README.md +++ /dev/null @@ -1,44 +0,0 @@ -# `nanoassert` - -[![Build Status](https://travis-ci.org/emilbayes/nanoassert.svg?branch=master)](https://travis-ci.org/emilbayes/nanoassert) - -> Nanoscale assertion module - -## Usage - -```js -var assert = require('nanoassert') - -assert(a !== b, `${a} !== ${b}`) -``` - -## API - -### `assert(declaration, [message])` - -Assert that `declaration` is truthy otherwise throw `AssertionError` with -optional `message`. In Javascript runtimes that use v8, you will get a nicer -stack trace with this error. -If you want friendlier messages you can use template strings to show the -assertion made like in the example above. - -## Why - -I like to write public facing code very defensively, but have reservations about -the size incurred by the `assert` module. I only use the top-level `assert` -method anyway. 
- -## `nanoassert@^1.1.0` - -Docs for the previous version, which is used by many modules on npm, can be -[found here](https://github.com/emilbayes/nanoassert/tree/v1.1.0) - -## Install - -```sh -npm install nanoassert -``` - -## License - -[ISC](LICENSE) diff --git a/node_modules/nanoassert/example.js b/node_modules/nanoassert/example.js deleted file mode 100644 index b074eaf..0000000 --- a/node_modules/nanoassert/example.js +++ /dev/null @@ -1,5 +0,0 @@ -var assert = require('.') - -var a = 1 -var b = 1 -assert(a !== b, `${a} !== ${b}`) diff --git a/node_modules/nanoassert/index.js b/node_modules/nanoassert/index.js deleted file mode 100644 index 61005a9..0000000 --- a/node_modules/nanoassert/index.js +++ /dev/null @@ -1,18 +0,0 @@ -module.exports = assert - -class AssertionError extends Error {} -AssertionError.prototype.name = 'AssertionError' - -/** - * Minimal assert function - * @param {any} t Value to check if falsy - * @param {string=} m Optional assertion error message - * @throws {AssertionError} - */ -function assert (t, m) { - if (!t) { - var err = new AssertionError(m) - if (Error.captureStackTrace) Error.captureStackTrace(err, assert) - throw err - } -} diff --git a/node_modules/nanoassert/package.json b/node_modules/nanoassert/package.json deleted file mode 100644 index 5c8e8af..0000000 --- a/node_modules/nanoassert/package.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "_from": "nanoassert@^2.0.0", - "_id": "nanoassert@2.0.0", - "_inBundle": false, - "_integrity": "sha512-7vO7n28+aYO4J+8w96AzhmU8G+Y/xpPDJz/se19ICsqj/momRbb9mh9ZUtkoJ5X3nTnPdhEJyc0qnM6yAsHBaA==", - "_location": "/nanoassert", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "nanoassert@^2.0.0", - "name": "nanoassert", - "escapedName": "nanoassert", - "rawSpec": "^2.0.0", - "saveSpec": null, - "fetchSpec": "^2.0.0" - }, - "_requiredBy": [ - "/", - "/async-neocities", - "/streamx" - ], - "_resolved": "https://registry.npmjs.org/nanoassert/-/nanoassert-2.0.0.tgz", - "_shasum": "a05f86de6c7a51618038a620f88878ed1e490c09", - "_spec": "nanoassert@^2.0.0", - "_where": "/Users/bret/repos/deploy-to-neocities", - "author": { - "name": "Emil Bay", - "email": "github@tixz.dk" - }, - "bugs": { - "url": "https://github.com/emilbayes/nanoassert/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "Nanoscale assertion module", - "devDependencies": { - "tape": "^4.9.1" - }, - "homepage": "https://github.com/emilbayes/nanoassert#readme", - "keywords": [ - "assert", - "unassert", - "power-assert", - "tiny", - "nano", - "pico" - ], - "license": "ISC", - "main": "index.js", - "name": "nanoassert", - "repository": { - "type": "git", - "url": "git+https://github.com/emilbayes/nanoassert.git" - }, - "scripts": { - "test": "tape test.js" - }, - "version": "2.0.0" -} diff --git a/node_modules/nanoassert/test.js b/node_modules/nanoassert/test.js deleted file mode 100644 index f195e6c..0000000 --- a/node_modules/nanoassert/test.js +++ /dev/null @@ -1,27 +0,0 @@ -var assert = require('.') -var test = require('tape') - -test('test', function (t) { - try { - assert(true === true) // test that it doesn't throw - t.pass('does not throw on truthy') - } catch (e) { - t.fail() - } - - t.throws(assert.bind(null, false), 'throws on falsy') - - try { - assert(false) - } catch (e) { - t.equal(e.message, '', 'default message') - } - - try { - assert(false, 'hello world') - } catch (e) { - t.equal(e.message, 'hello world', 'custom message') - } - - t.end() -}) 
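The removed `index.js` and `test.js` above show that a failed assertion throws an `AssertionError` whose `name` is set on the prototype. A minimal sketch of how a caller sees that, assuming `nanoassert@^2.0.0` is installed:

```js
// Minimal sketch, assuming nanoassert@^2.0.0 is installed.
var assert = require('nanoassert')

var input = 42

try {
  // Template strings make the failure message show the actual values.
  assert(typeof input === 'string', `expected string, got ${typeof input}`)
} catch (err) {
  console.log(err.name)    // 'AssertionError'
  console.log(err.message) // 'expected string, got number'
}
```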
diff --git a/node_modules/node-fetch/CHANGELOG.md b/node_modules/node-fetch/CHANGELOG.md deleted file mode 100644 index 188fcd3..0000000 --- a/node_modules/node-fetch/CHANGELOG.md +++ /dev/null @@ -1,266 +0,0 @@ - -Changelog -========= - - -# 2.x release - -## v2.6.0 - -- Enhance: `options.agent`, it now accepts a function that returns custom http(s).Agent instance based on current URL, see readme for more information. -- Fix: incorrect `Content-Length` was returned for stream body in 2.5.0 release; note that `node-fetch` doesn't calculate content length for stream body. -- Fix: `Response.url` should return empty string instead of `null` by default. - -## v2.5.0 - -- Enhance: `Response` object now includes `redirected` property. -- Enhance: `fetch()` now accepts third-party `Blob` implementation as body. -- Other: disable `package-lock.json` generation as we never commit them. -- Other: dev dependency update. -- Other: readme update. - -## v2.4.1 - -- Fix: `Blob` import rule for node < 10, as `Readable` isn't a named export. - -## v2.4.0 - -- Enhance: added `Brotli` compression support (using node's zlib). -- Enhance: updated `Blob` implementation per spec. -- Fix: set content type automatically for `URLSearchParams`. -- Fix: `Headers` now reject empty header names. -- Fix: test cases, as node 12+ no longer accepts invalid header response. - -## v2.3.0 - -- Enhance: added `AbortSignal` support, with README example. -- Enhance: handle invalid `Location` header during redirect by rejecting them explicitly with `FetchError`. -- Fix: update `browser.js` to support react-native environment, where `self` isn't available globally. - -## v2.2.1 - -- Fix: `compress` flag shouldn't overwrite existing `Accept-Encoding` header. -- Fix: multiple `import` rules, where `PassThrough` etc. doesn't have a named export when using node <10 and `--exerimental-modules` flag. -- Other: Better README. - -## v2.2.0 - -- Enhance: Support all `ArrayBuffer` view types -- Enhance: Support Web Workers -- Enhance: Support Node.js' `--experimental-modules` mode; deprecate `.es.js` file -- Fix: Add `__esModule` property to the exports object -- Other: Better example in README for writing response to a file -- Other: More tests for Agent - -## v2.1.2 - -- Fix: allow `Body` methods to work on `ArrayBuffer`-backed `Body` objects -- Fix: reject promise returned by `Body` methods when the accumulated `Buffer` exceeds the maximum size -- Fix: support custom `Host` headers with any casing -- Fix: support importing `fetch()` from TypeScript in `browser.js` -- Fix: handle the redirect response body properly - -## v2.1.1 - -Fix packaging errors in v2.1.0. - -## v2.1.0 - -- Enhance: allow using ArrayBuffer as the `body` of a `fetch()` or `Request` -- Fix: store HTTP headers of a `Headers` object internally with the given case, for compatibility with older servers that incorrectly treated header names in a case-sensitive manner -- Fix: silently ignore invalid HTTP headers -- Fix: handle HTTP redirect responses without a `Location` header just like non-redirect responses -- Fix: include bodies when following a redirection when appropriate - -## v2.0.0 - -This is a major release. Check [our upgrade guide](https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md) for an overview on some key differences between v1 and v2. - -### General changes - -- Major: Node.js 0.10.x and 0.12.x support is dropped -- Major: `require('node-fetch/lib/response')` etc. 
is now unsupported; use `require('node-fetch').Response` or ES6 module imports -- Enhance: start testing on Node.js v4.x, v6.x, v8.x LTS, as well as v9.x stable -- Enhance: use Rollup to produce a distributed bundle (less memory overhead and faster startup) -- Enhance: make `Object.prototype.toString()` on Headers, Requests, and Responses return correct class strings -- Other: rewrite in ES2015 using Babel -- Other: use Codecov for code coverage tracking -- Other: update package.json script for npm 5 -- Other: `encoding` module is now optional (alpha.7) -- Other: expose browser.js through package.json, avoid bundling mishaps (alpha.9) -- Other: allow TypeScript to `import` node-fetch by exposing default (alpha.9) - -### HTTP requests - -- Major: overwrite user's `Content-Length` if we can be sure our information is correct (per spec) -- Fix: errors in a response are caught before the body is accessed -- Fix: support WHATWG URL objects, created by `whatwg-url` package or `require('url').URL` in Node.js 7+ - -### Response and Request classes - -- Major: `response.text()` no longer attempts to detect encoding, instead always opting for UTF-8 (per spec); use `response.textConverted()` for the v1 behavior -- Major: make `response.json()` throw error instead of returning an empty object on 204 no-content respose (per spec; reverts behavior changed in v1.6.2) -- Major: internal methods are no longer exposed -- Major: throw error when a `GET` or `HEAD` Request is constructed with a non-null body (per spec) -- Enhance: add `response.arrayBuffer()` (also applies to Requests) -- Enhance: add experimental `response.blob()` (also applies to Requests) -- Enhance: `URLSearchParams` is now accepted as a body -- Enhance: wrap `response.json()` json parsing error as `FetchError` -- Fix: fix Request and Response with `null` body - -### Headers class - -- Major: remove `headers.getAll()`; make `get()` return all headers delimited by commas (per spec) -- Enhance: make Headers iterable -- Enhance: make Headers constructor accept an array of tuples -- Enhance: make sure header names and values are valid in HTTP -- Fix: coerce Headers prototype function parameters to strings, where applicable - -### Documentation - -- Enhance: more comprehensive API docs -- Enhance: add a list of default headers in README - - -# 1.x release - -## backport releases (v1.7.0 and beyond) - -See [changelog on 1.x branch](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) for details. 
- -## v1.6.3 - -- Enhance: error handling document to explain `FetchError` design -- Fix: support `form-data` 2.x releases (requires `form-data` >= 2.1.0) - -## v1.6.2 - -- Enhance: minor document update -- Fix: response.json() returns empty object on 204 no-content response instead of throwing a syntax error - -## v1.6.1 - -- Fix: if `res.body` is a non-stream non-formdata object, we will call `body.toString` and send it as a string -- Fix: `counter` value is incorrectly set to `follow` value when wrapping Request instance -- Fix: documentation update - -## v1.6.0 - -- Enhance: added `res.buffer()` api for convenience, it returns body as a Node.js buffer -- Enhance: better old server support by handling raw deflate response -- Enhance: skip encoding detection for non-HTML/XML response -- Enhance: minor document update -- Fix: HEAD request doesn't need decompression, as body is empty -- Fix: `req.body` now accepts a Node.js buffer - -## v1.5.3 - -- Fix: handle 204 and 304 responses when body is empty but content-encoding is gzip/deflate -- Fix: allow resolving response and cloned response in any order -- Fix: avoid setting `content-length` when `form-data` body use streams -- Fix: send DELETE request with content-length when body is present -- Fix: allow any url when calling new Request, but still reject non-http(s) url in fetch - -## v1.5.2 - -- Fix: allow node.js core to handle keep-alive connection pool when passing a custom agent - -## v1.5.1 - -- Fix: redirect mode `manual` should work even when there is no redirection or broken redirection - -## v1.5.0 - -- Enhance: rejected promise now use custom `Error` (thx to @pekeler) -- Enhance: `FetchError` contains `err.type` and `err.code`, allows for better error handling (thx to @pekeler) -- Enhance: basic support for redirect mode `manual` and `error`, allows for location header extraction (thx to @jimmywarting for the initial PR) - -## v1.4.1 - -- Fix: wrapping Request instance with FormData body again should preserve the body as-is - -## v1.4.0 - -- Enhance: Request and Response now have `clone` method (thx to @kirill-konshin for the initial PR) -- Enhance: Request and Response now have proper string and buffer body support (thx to @kirill-konshin) -- Enhance: Body constructor has been refactored out (thx to @kirill-konshin) -- Enhance: Headers now has `forEach` method (thx to @tricoder42) -- Enhance: back to 100% code coverage -- Fix: better form-data support (thx to @item4) -- Fix: better character encoding detection under chunked encoding (thx to @dsuket for the initial PR) - -## v1.3.3 - -- Fix: make sure `Content-Length` header is set when body is string for POST/PUT/PATCH requests -- Fix: handle body stream error, for cases such as incorrect `Content-Encoding` header -- Fix: when following certain redirects, use `GET` on subsequent request per Fetch Spec -- Fix: `Request` and `Response` constructors now parse headers input using `Headers` - -## v1.3.2 - -- Enhance: allow auto detect of form-data input (no `FormData` spec on node.js, this is form-data specific feature) - -## v1.3.1 - -- Enhance: allow custom host header to be set (server-side only feature, as it's a forbidden header on client-side) - -## v1.3.0 - -- Enhance: now `fetch.Request` is exposed as well - -## v1.2.1 - -- Enhance: `Headers` now normalized `Number` value to `String`, prevent common mistakes - -## v1.2.0 - -- Enhance: now fetch.Headers and fetch.Response are exposed, making testing easier - -## v1.1.2 - -- Fix: `Headers` should only support `String` and 
`Array` properties, and ignore others - -## v1.1.1 - -- Enhance: now req.headers accept both plain object and `Headers` instance - -## v1.1.0 - -- Enhance: timeout now also applies to response body (in case of slow response) -- Fix: timeout is now cleared properly when fetch is done/has failed - -## v1.0.6 - -- Fix: less greedy content-type charset matching - -## v1.0.5 - -- Fix: when `follow = 0`, fetch should not follow redirect -- Enhance: update tests for better coverage -- Enhance: code formatting -- Enhance: clean up doc - -## v1.0.4 - -- Enhance: test iojs support -- Enhance: timeout attached to socket event only fire once per redirect - -## v1.0.3 - -- Fix: response size limit should reject large chunk -- Enhance: added character encoding detection for xml, such as rss/atom feed (encoding in DTD) - -## v1.0.2 - -- Fix: added res.ok per spec change - -## v1.0.0 - -- Enhance: better test coverage and doc - - -# 0.x release - -## v0.1 - -- Major: initial public release diff --git a/node_modules/node-fetch/LICENSE.md b/node_modules/node-fetch/LICENSE.md deleted file mode 100644 index 660ffec..0000000 --- a/node_modules/node-fetch/LICENSE.md +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
- diff --git a/node_modules/node-fetch/README.md b/node_modules/node-fetch/README.md deleted file mode 100644 index cb19901..0000000 --- a/node_modules/node-fetch/README.md +++ /dev/null @@ -1,583 +0,0 @@ -node-fetch -========== - -[![npm version][npm-image]][npm-url] -[![build status][travis-image]][travis-url] -[![coverage status][codecov-image]][codecov-url] -[![install size][install-size-image]][install-size-url] - -A light-weight module that brings `window.fetch` to Node.js - -(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567)) - - - -- [Motivation](#motivation) -- [Features](#features) -- [Difference from client-side fetch](#difference-from-client-side-fetch) -- [Installation](#installation) -- [Loading and configuring the module](#loading-and-configuring-the-module) -- [Common Usage](#common-usage) - - [Plain text or HTML](#plain-text-or-html) - - [JSON](#json) - - [Simple Post](#simple-post) - - [Post with JSON](#post-with-json) - - [Post with form parameters](#post-with-form-parameters) - - [Handling exceptions](#handling-exceptions) - - [Handling client and server errors](#handling-client-and-server-errors) -- [Advanced Usage](#advanced-usage) - - [Streams](#streams) - - [Buffer](#buffer) - - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data) - - [Extract Set-Cookie Header](#extract-set-cookie-header) - - [Post data using a file stream](#post-data-using-a-file-stream) - - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart) - - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal) -- [API](#api) - - [fetch(url[, options])](#fetchurl-options) - - [Options](#options) - - [Class: Request](#class-request) - - [Class: Response](#class-response) - - [Class: Headers](#class-headers) - - [Interface: Body](#interface-body) - - [Class: FetchError](#class-fetcherror) -- [License](#license) -- [Acknowledgement](#acknowledgement) - - - -## Motivation - -Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime. - -See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side). - -## Features - -- Stay consistent with `window.fetch` API. -- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences. -- Use native promise, but allow substituting it with [insert your favorite promise library]. -- Use native Node streams for body, on both request and response. -- Decode content encoding (gzip/deflate) properly, and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically. -- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting. - -## Difference from client-side fetch - -- See [Known Differences](LIMITS.md) for details. -- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue. -- Pull requests are welcomed too! 
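Condensing the usage and error-handling guidance of the removed readme below into one place: non-2xx responses resolve normally and are checked via `res.ok`, while network and body failures reject with a `FetchError`. A minimal sketch, assuming `node-fetch@2.x` is installed:

```js
// Minimal sketch, assuming node-fetch@2.x is installed.
const fetch = require('node-fetch');

fetch('https://api.github.com/users/github')
  .then(res => {
    // 3xx-5xx responses are not exceptions; check res.ok explicitly.
    if (!res.ok) throw new Error(`HTTP ${res.status} ${res.statusText}`);
    return res.json();
  })
  .then(json => console.log(json))
  .catch(err => {
    // Network/operational failures surface here as FetchError instances,
    // alongside the HTTP error thrown above.
    console.error(err.name, err.message);
  });
```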
- -## Installation - -Current stable release (`2.x`) - -```sh -$ npm install node-fetch --save -``` - -## Loading and configuring the module -We suggest you load the module via `require`, pending the stabalizing of es modules in node: -```js -const fetch = require('node-fetch'); -``` - -If you are using a Promise library other than native, set it through fetch.Promise: -```js -const Bluebird = require('bluebird'); - -fetch.Promise = Bluebird; -``` - -## Common Usage - -NOTE: The documentation below is up-to-date with `2.x` releases, [see `1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences. - -#### Plain text or HTML -```js -fetch('https://github.com/') - .then(res => res.text()) - .then(body => console.log(body)); -``` - -#### JSON - -```js - -fetch('https://api.github.com/users/github') - .then(res => res.json()) - .then(json => console.log(json)); -``` - -#### Simple Post -```js -fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' }) - .then(res => res.json()) // expecting a json response - .then(json => console.log(json)); -``` - -#### Post with JSON - -```js -const body = { a: 1 }; - -fetch('https://httpbin.org/post', { - method: 'post', - body: JSON.stringify(body), - headers: { 'Content-Type': 'application/json' }, - }) - .then(res => res.json()) - .then(json => console.log(json)); -``` - -#### Post with form parameters -`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods. - -NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such: - -```js -const { URLSearchParams } = require('url'); - -const params = new URLSearchParams(); -params.append('a', 1); - -fetch('https://httpbin.org/post', { method: 'POST', body: params }) - .then(res => res.json()) - .then(json => console.log(json)); -``` - -#### Handling exceptions -NOTE: 3xx-5xx responses are *NOT* exceptions, and should be handled in `then()`, see the next section. - -Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, like network errors, and operational errors which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details. - -```js -fetch('https://domain.invalid/') - .catch(err => console.error(err)); -``` - -#### Handling client and server errors -It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses: - -```js -function checkStatus(res) { - if (res.ok) { // res.status >= 200 && res.status < 300 - return res; - } else { - throw MyCustomError(res.statusText); - } -} - -fetch('https://httpbin.org/status/400') - .then(checkStatus) - .then(res => console.log('will not get here...')) -``` - -## Advanced Usage - -#### Streams -The "Node.js way" is to use streams when possible: - -```js -fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') - .then(res => { - const dest = fs.createWriteStream('./octocat.png'); - res.body.pipe(dest); - }); -``` - -#### Buffer -If you prefer to cache binary data in full, use buffer(). 
(NOTE: buffer() is a `node-fetch` only API) - -```js -const fileType = require('file-type'); - -fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') - .then(res => res.buffer()) - .then(buffer => fileType(buffer)) - .then(type => { /* ... */ }); -``` - -#### Accessing Headers and other Meta data -```js -fetch('https://github.com/') - .then(res => { - console.log(res.ok); - console.log(res.status); - console.log(res.statusText); - console.log(res.headers.raw()); - console.log(res.headers.get('content-type')); - }); -``` - -#### Extract Set-Cookie Header - -Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`, this is a `node-fetch` only API. - -```js -fetch(url).then(res => { - // returns an array of values, instead of a string of comma-separated values - console.log(res.headers.raw()['set-cookie']); -}); -``` - -#### Post data using a file stream - -```js -const { createReadStream } = require('fs'); - -const stream = createReadStream('input.txt'); - -fetch('https://httpbin.org/post', { method: 'POST', body: stream }) - .then(res => res.json()) - .then(json => console.log(json)); -``` - -#### Post with form-data (detect multipart) - -```js -const FormData = require('form-data'); - -const form = new FormData(); -form.append('a', 1); - -fetch('https://httpbin.org/post', { method: 'POST', body: form }) - .then(res => res.json()) - .then(json => console.log(json)); - -// OR, using custom headers -// NOTE: getHeaders() is non-standard API - -const form = new FormData(); -form.append('a', 1); - -const options = { - method: 'POST', - body: form, - headers: form.getHeaders() -} - -fetch('https://httpbin.org/post', options) - .then(res => res.json()) - .then(json => console.log(json)); -``` - -#### Request cancellation with AbortSignal - -> NOTE: You may only cancel streamed requests on Node >= v8.0.0 - -You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller). - -An example of timing out a request after 150ms could be achieved as follows: - -```js -import AbortController from 'abort-controller'; - -const controller = new AbortController(); -const timeout = setTimeout( - () => { controller.abort(); }, - 150, -); - -fetch(url, { signal: controller.signal }) - .then(res => res.json()) - .then( - data => { - useData(data) - }, - err => { - if (err.name === 'AbortError') { - // request was aborted - } - }, - ) - .finally(() => { - clearTimeout(timeout); - }); -``` - -See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples. - - -## API - -### fetch(url[, options]) - -- `url` A string representing the URL for fetching -- `options` [Options](#fetch-options) for the HTTP(S) request -- Returns: Promise<[Response](#class-response)> - -Perform an HTTP(S) fetch. - -`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected promise. - - -### Options - -The default values are shown after each option key. - -```js -{ - // These properties are part of the Fetch Standard - method: 'GET', - headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below) - body: null, // request body. 
can be null, a string, a Buffer, a Blob, or a Node.js Readable stream - redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect - signal: null, // pass an instance of AbortSignal to optionally abort requests - - // The following properties are node-fetch extensions - follow: 20, // maximum redirect count. 0 to not follow redirect - timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead. - compress: true, // support gzip/deflate content encoding. false to disable - size: 0, // maximum response body size in bytes. 0 to disable - agent: null // http(s).Agent instance or function that returns an instance (see below) -} -``` - -##### Default Headers - -If no values are set, the following request headers will be sent automatically: - -Header | Value -------------------- | -------------------------------------------------------- -`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ -`Accept` | `*/*` -`Connection` | `close` _(when no `options.agent` is present)_ -`Content-Length` | _(automatically calculated, if possible)_ -`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ -`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)` - -Note: when `body` is a `Stream`, `Content-Length` is not set automatically. - -##### Custom Agent - -The `agent` option allows you to specify networking related options that's out of the scope of Fetch. Including and not limit to: - -- Support self-signed certificate -- Use only IPv4 or IPv6 -- Custom DNS Lookup - -See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information. - -In addition, `agent` option accepts a function that returns http(s).Agent instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol. - -```js -const httpAgent = new http.Agent({ - keepAlive: true -}); -const httpsAgent = new https.Agent({ - keepAlive: true -}); - -const options = { - agent: function (_parsedURL) { - if (_parsedURL.protocol == 'http:') { - return httpAgent; - } else { - return httpsAgent; - } - } -} -``` - - -### Class: Request - -An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface. - -Due to the nature of Node.js, the following properties are not implemented at this moment: - -- `type` -- `destination` -- `referrer` -- `referrerPolicy` -- `mode` -- `credentials` -- `cache` -- `integrity` -- `keepalive` - -The following node-fetch extension properties are provided: - -- `follow` -- `compress` -- `counter` -- `agent` - -See [options](#fetch-options) for exact meaning of these extensions. - -#### new Request(input[, options]) - -*(spec-compliant)* - -- `input` A string representing a URL, or another `Request` (which will be cloned) -- `options` [Options][#fetch-options] for the HTTP(S) request - -Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request). - -In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object. - - -### Class: Response - -An HTTP(S) response. This class implements the [Body](#iface-body) interface. 
- -The following properties are not implemented in node-fetch at this moment: - -- `Response.error()` -- `Response.redirect()` -- `type` -- `trailer` - -#### new Response([body[, options]]) - -*(spec-compliant)* - -- `body` A string or [Readable stream][node-readable] -- `options` A [`ResponseInit`][response-init] options dictionary - -Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response). - -Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly. - -#### response.ok - -*(spec-compliant)* - -Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300. - -#### response.redirected - -*(spec-compliant)* - -Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0. - - -### Class: Headers - -This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented. - -#### new Headers([init]) - -*(spec-compliant)* - -- `init` Optional argument to pre-fill the `Headers` object - -Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, an key-value map object, or any iterable object. - -```js -// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class - -const meta = { - 'Content-Type': 'text/xml', - 'Breaking-Bad': '<3' -}; -const headers = new Headers(meta); - -// The above is equivalent to -const meta = [ - [ 'Content-Type', 'text/xml' ], - [ 'Breaking-Bad', '<3' ] -]; -const headers = new Headers(meta); - -// You can in fact use any iterable objects, like a Map or even another Headers -const meta = new Map(); -meta.set('Content-Type', 'text/xml'); -meta.set('Breaking-Bad', '<3'); -const headers = new Headers(meta); -const copyOfHeaders = new Headers(headers); -``` - - -### Interface: Body - -`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes. - -The following methods are not yet implemented in node-fetch at this moment: - -- `formData()` - -#### body.body - -*(deviation from spec)* - -* Node.js [`Readable` stream][node-readable] - -The data encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable]. - -#### body.bodyUsed - -*(spec-compliant)* - -* `Boolean` - -A boolean property for if this body has been consumed. Per spec, a consumed body cannot be used again. - -#### body.arrayBuffer() -#### body.blob() -#### body.json() -#### body.text() - -*(spec-compliant)* - -* Returns: Promise - -Consume the body and return a promise that will resolve to one of these formats. - -#### body.buffer() - -*(node-fetch extension)* - -* Returns: Promise<Buffer> - -Consume the body and return a promise that will resolve to a Buffer. - -#### body.textConverted() - -*(node-fetch extension)* - -* Returns: Promise<String> - -Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8, if possible. 
- -(This API requires an optional dependency on npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.) - - -### Class: FetchError - -*(node-fetch extension)* - -An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info. - - -### Class: AbortError - -*(node-fetch extension)* - -An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info. - -## Acknowledgement - -Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference. - -`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr). - -## License - -MIT - -[npm-image]: https://flat.badgen.net/npm/v/node-fetch -[npm-url]: https://www.npmjs.com/package/node-fetch -[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch -[travis-url]: https://travis-ci.org/bitinn/node-fetch -[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master -[codecov-url]: https://codecov.io/gh/bitinn/node-fetch -[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch -[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch -[whatwg-fetch]: https://fetch.spec.whatwg.org/ -[response-init]: https://fetch.spec.whatwg.org/#responseinit -[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams -[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers -[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md -[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md -[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md diff --git a/node_modules/node-fetch/browser.js b/node_modules/node-fetch/browser.js deleted file mode 100644 index 0ad5de0..0000000 --- a/node_modules/node-fetch/browser.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; - -// ref: https://github.com/tc39/proposal-global -var getGlobal = function () { - // the only reliable means to get the global object is - // `Function('return this')()` - // However, this causes CSP violations in Chrome apps. - if (typeof self !== 'undefined') { return self; } - if (typeof window !== 'undefined') { return window; } - if (typeof global !== 'undefined') { return global; } - throw new Error('unable to locate global object'); -} - -var global = getGlobal(); - -module.exports = exports = global.fetch; - -// Needed for TypeScript and Webpack. -exports.default = global.fetch.bind(global); - -exports.Headers = global.Headers; -exports.Request = global.Request; -exports.Response = global.Response; \ No newline at end of file diff --git a/node_modules/node-fetch/lib/index.es.js b/node_modules/node-fetch/lib/index.es.js deleted file mode 100644 index 37d022c..0000000 --- a/node_modules/node-fetch/lib/index.es.js +++ /dev/null @@ -1,1633 +0,0 @@ -process.emitWarning("The .es.js file is deprecated. 
Use .mjs instead."); - -import Stream from 'stream'; -import http from 'http'; -import Url from 'url'; -import https from 'https'; -import zlib from 'zlib'; - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; 
- } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = require('encoding').convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout - }; - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -export default fetch; -export { Headers, Request, Response, FetchError }; diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js deleted file mode 100644 index daa44bc..0000000 --- a/node_modules/node-fetch/lib/index.js +++ /dev/null @@ -1,1642 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var Stream = _interopDefault(require('stream')); -var http = _interopDefault(require('http')); -var Url = _interopDefault(require('url')); -var https = _interopDefault(require('https')); -var zlib = _interopDefault(require('zlib')); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = require('encoding').convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout - }; - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; diff --git a/node_modules/node-fetch/lib/index.mjs b/node_modules/node-fetch/lib/index.mjs deleted file mode 100644 index e571ea6..0000000 --- a/node_modules/node-fetch/lib/index.mjs +++ /dev/null @@ -1,1631 +0,0 @@ -import Stream from 'stream'; -import http from 'http'; -import Url from 'url'; -import https from 'https'; -import zlib from 'zlib'; - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = require('encoding').convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout - }; - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -export default fetch; -export { Headers, Request, Response, FetchError }; diff --git a/node_modules/node-fetch/package.json b/node_modules/node-fetch/package.json deleted file mode 100644 index 0a77023..0000000 --- a/node_modules/node-fetch/package.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "_from": "node-fetch@^2.6.0", - "_id": "node-fetch@2.6.0", - "_inBundle": false, - "_integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", - "_location": "/node-fetch", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "node-fetch@^2.6.0", - "name": "node-fetch", - "escapedName": "node-fetch", - "rawSpec": "^2.6.0", - "saveSpec": null, - "fetchSpec": "^2.6.0" - }, - "_requiredBy": [ - "/async-neocities", - "/auto-changelog" - ], - "_resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", - "_shasum": "e633456386d4aa55863f676a7ab0daa8fdecb0fd", - "_spec": "node-fetch@^2.6.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "David Frank" - }, - "browser": "./browser.js", - "bugs": { - "url": "https://github.com/bitinn/node-fetch/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "A light-weight module that brings window.fetch to node.js", - "devDependencies": { - "@ungap/url-search-params": "^0.1.2", - "abort-controller": "^1.1.0", - "abortcontroller-polyfill": "^1.3.0", - "babel-core": "^6.26.3", - "babel-plugin-istanbul": "^4.1.6", - "babel-preset-env": "^1.6.1", - "babel-register": "^6.16.3", - "chai": "^3.5.0", - "chai-as-promised": "^7.1.1", - "chai-iterator": "^1.1.1", - "chai-string": "~1.3.0", - "codecov": "^3.3.0", - "cross-env": "^5.2.0", - "form-data": "^2.3.3", - "is-builtin-module": "^1.0.0", - "mocha": "^5.0.0", - "nyc": "11.9.0", - "parted": "^0.1.1", - "promise": "^8.0.3", - "resumer": "0.0.0", - "rollup": "^0.63.4", - "rollup-plugin-babel": "^3.0.7", - "string-to-arraybuffer": 
"^1.0.2", - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "files": [ - "lib/index.js", - "lib/index.mjs", - "lib/index.es.js", - "browser.js" - ], - "homepage": "https://github.com/bitinn/node-fetch", - "keywords": [ - "fetch", - "http", - "promise" - ], - "license": "MIT", - "main": "lib/index", - "module": "lib/index.mjs", - "name": "node-fetch", - "repository": { - "type": "git", - "url": "git+https://github.com/bitinn/node-fetch.git" - }, - "scripts": { - "build": "cross-env BABEL_ENV=rollup rollup -c", - "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json", - "prepare": "npm run build", - "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js", - "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js" - }, - "version": "2.6.0" -} diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE deleted file mode 100644 index 19129e3..0000000 --- a/node_modules/once/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/once/README.md b/node_modules/once/README.md deleted file mode 100644 index 1f1ffca..0000000 --- a/node_modules/once/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# once - -Only call a function once. - -## usage - -```javascript -var once = require('once') - -function load (file, cb) { - cb = once(cb) - loader.load('file') - loader.once('load', cb) - loader.once('error', cb) -} -``` - -Or add to the Function.prototype in a responsible way: - -```javascript -// only has to be done once -require('once').proto() - -function load (file, cb) { - cb = cb.once() - loader.load('file') - loader.once('load', cb) - loader.once('error', cb) -} -``` - -Ironically, the prototype feature makes this module twice as -complicated as necessary. - -To check whether you function has been called, use `fn.called`. Once the -function is called for the first time the return value of the original -function is saved in `fn.value` and subsequent calls will continue to -return this value. - -```javascript -var once = require('once') - -function load (cb) { - cb = once(cb) - var stream = createStream() - stream.once('data', cb) - stream.once('end', function () { - if (!cb.called) cb(new Error('not found')) - }) -} -``` - -## `once.strict(func)` - -Throw an error if the function is called twice. - -Some functions are expected to be called only once. Using `once` for them would -potentially hide logical errors. 
- -In the example below, the `greet` function has to call the callback only once: - -```javascript -function greet (name, cb) { - // return is missing from the if statement - // when no name is passed, the callback is called twice - if (!name) cb('Hello anonymous') - cb('Hello ' + name) -} - -function log (msg) { - console.log(msg) -} - -// this will print 'Hello anonymous' but the logical error will be missed -greet(null, once(msg)) - -// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time -greet(null, once.strict(msg)) -``` diff --git a/node_modules/once/once.js b/node_modules/once/once.js deleted file mode 100644 index 2354067..0000000 --- a/node_modules/once/once.js +++ /dev/null @@ -1,42 +0,0 @@ -var wrappy = require('wrappy') -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) - -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} - -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} diff --git a/node_modules/once/package.json b/node_modules/once/package.json deleted file mode 100644 index 9d11995..0000000 --- a/node_modules/once/package.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "_from": "once@^1.3.1", - "_id": "once@1.4.0", - "_inBundle": false, - "_integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "_location": "/once", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "once@^1.3.1", - "name": "once", - "escapedName": "once", - "rawSpec": "^1.3.1", - "saveSpec": null, - "fetchSpec": "^1.3.1" - }, - "_requiredBy": [ - "/budo", - "/end-of-stream", - "/glob", - "/inflight", - "/pump" - ], - "_resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "_shasum": "583b1aa775961d4b113ac17d9c50baef9dd76bd1", - "_spec": "once@^1.3.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/pump", - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/once/issues" - }, - "bundleDependencies": false, - "dependencies": { - "wrappy": "1" - }, - "deprecated": false, - "description": "Run a function exactly one time", - "devDependencies": { - "tap": "^7.0.1" - }, - "directories": { - "test": "test" - }, - "files": [ - "once.js" - ], - "homepage": "https://github.com/isaacs/once#readme", - "keywords": [ - "once", - "function", - "one", - "single" - ], - "license": "ISC", - "main": "once.js", - "name": "once", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/once.git" - }, - "scripts": { - "test": "tap test/*.js" - }, - "version": "1.4.0" -} diff --git a/node_modules/pretty-bytes/index.d.ts b/node_modules/pretty-bytes/index.d.ts deleted file mode 100644 index 9a64626..0000000 --- a/node_modules/pretty-bytes/index.d.ts +++ /dev/null @@ -1,66 +0,0 @@ -declare namespace prettyBytes { - interface Options { - /** - Include plus sign for positive numbers. If the difference is exactly zero a space character will be prepended instead for better alignment. - - @default false - */ - readonly signed?: boolean; - - /** - - If `false`: Output won't be localized. - - If `true`: Localize the output using the system/browser locale. - - If `string`: Expects a [BCP 47 language tag](https://en.wikipedia.org/wiki/IETF_language_tag) (For example: `en`, `de`, …) - - __Note:__ Localization should generally work in browsers. Node.js needs to be [built](https://github.com/nodejs/node/wiki/Intl) with `full-icu` or `system-icu`. Alternatively, the [`full-icu`](https://github.com/unicode-org/full-icu-npm) module can be used to provide support at runtime. - - @default false - */ - readonly locale?: boolean | string; - - /** - Format the number as [bits](https://en.wikipedia.org/wiki/Bit) instead of [bytes](https://en.wikipedia.org/wiki/Byte). This can be useful when, for example, referring to [bit rate](https://en.wikipedia.org/wiki/Bit_rate). - - @default false - - ``` - import prettyBytes = require('pretty-bytes'); - - prettyBytes(1337, {bits: true}); - //=> '1.34 kbit' - ``` - */ - readonly bits?: boolean; - } -} - -/** -Convert bytes to a human readable string: `1337` → `1.34 kB`. - -@param number - The number to format. - -@example -``` -import prettyBytes = require('pretty-bytes'); - -prettyBytes(1337); -//=> '1.34 kB' - -prettyBytes(100); -//=> '100 B' - -// Display file size differences -prettyBytes(42, {signed: true}); -//=> '+42 B' - -// Localized output using German locale -prettyBytes(1337, {locale: 'de'}); -//=> '1,34 kB' -``` -*/ -declare function prettyBytes( - number: number, - options?: prettyBytes.Options -): string; - -export = prettyBytes; diff --git a/node_modules/pretty-bytes/index.js b/node_modules/pretty-bytes/index.js deleted file mode 100644 index b7a0bac..0000000 --- a/node_modules/pretty-bytes/index.js +++ /dev/null @@ -1,76 +0,0 @@ -'use strict'; - -const BYTE_UNITS = [ - 'B', - 'kB', - 'MB', - 'GB', - 'TB', - 'PB', - 'EB', - 'ZB', - 'YB' -]; - -const BIT_UNITS = [ - 'b', - 'kbit', - 'Mbit', - 'Gbit', - 'Tbit', - 'Pbit', - 'Ebit', - 'Zbit', - 'Ybit' -]; - -/* -Formats the given number using `Number#toLocaleString`. -- If locale is a string, the value is expected to be a locale-key (for example: `de`). -- If locale is true, the system default locale is used for translation. -- If no value for locale is specified, the number is returned unmodified. 
-*/ -const toLocaleString = (number, locale) => { - let result = number; - if (typeof locale === 'string') { - result = number.toLocaleString(locale); - } else if (locale === true) { - result = number.toLocaleString(); - } - - return result; -}; - -module.exports = (number, options) => { - if (!Number.isFinite(number)) { - throw new TypeError(`Expected a finite number, got ${typeof number}: ${number}`); - } - - options = Object.assign({bits: false}, options); - const UNITS = options.bits ? BIT_UNITS : BYTE_UNITS; - - if (options.signed && number === 0) { - return ' 0 ' + UNITS[0]; - } - - const isNegative = number < 0; - const prefix = isNegative ? '-' : (options.signed ? '+' : ''); - - if (isNegative) { - number = -number; - } - - if (number < 1) { - const numberString = toLocaleString(number, options.locale); - return prefix + numberString + ' ' + UNITS[0]; - } - - const exponent = Math.min(Math.floor(Math.log10(number) / 3), UNITS.length - 1); - // eslint-disable-next-line unicorn/prefer-exponentiation-operator - number = Number((number / Math.pow(1000, exponent)).toPrecision(3)); - const numberString = toLocaleString(number, options.locale); - - const unit = UNITS[exponent]; - - return prefix + numberString + ' ' + unit; -}; diff --git a/node_modules/pretty-bytes/license b/node_modules/pretty-bytes/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/pretty-bytes/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
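The removed `pretty-bytes/index.js` hunk above derives the unit from the decimal exponent (`Math.floor(Math.log10(n) / 3)`) and then divides by `1000^exponent`, which is why the package's README below stresses that output is base-10 (kB, not KiB). As a reading aid only, here is a minimal sketch of that scaling step, with the sign, bits, and locale handling omitted; the function and constant names are illustrative and are not part of the package:

```js
// Sketch of the base-10 scaling idea used above (not a drop-in for pretty-bytes).
// Pick the unit by thirds of the decimal exponent, then divide by 1000^exponent.
const UNITS = ['B', 'kB', 'MB', 'GB', 'TB'];

function approxPrettyBytes (number) {
  if (number < 1) return number + ' ' + UNITS[0];
  const exponent = Math.min(Math.floor(Math.log10(number) / 3), UNITS.length - 1);
  const scaled = Number((number / Math.pow(1000, exponent)).toPrecision(3));
  return scaled + ' ' + UNITS[exponent];
}

approxPrettyBytes(1337); // => '1.34 kB'  (log10(1337) ≈ 3.13 → exponent 1 → 1337 / 1000 → 1.34)
approxPrettyBytes(100);  // => '100 B'
```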
diff --git a/node_modules/pretty-bytes/package.json b/node_modules/pretty-bytes/package.json deleted file mode 100644 index a3b6237..0000000 --- a/node_modules/pretty-bytes/package.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "_from": "pretty-bytes@^5.3.0", - "_id": "pretty-bytes@5.3.0", - "_inBundle": false, - "_integrity": "sha512-hjGrh+P926p4R4WbaB6OckyRtO0F0/lQBiT+0gnxjV+5kjPBrfVBFCsCLbMqVQeydvIoouYTCmmEURiH3R1Bdg==", - "_location": "/pretty-bytes", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pretty-bytes@^5.3.0", - "name": "pretty-bytes", - "escapedName": "pretty-bytes", - "rawSpec": "^5.3.0", - "saveSpec": null, - "fetchSpec": "^5.3.0" - }, - "_requiredBy": [ - "/async-neocities" - ], - "_resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.3.0.tgz", - "_shasum": "f2849e27db79fb4d6cfe24764fc4134f165989f2", - "_spec": "pretty-bytes@^5.3.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "bugs": { - "url": "https://github.com/sindresorhus/pretty-bytes/issues" - }, - "bundleDependencies": false, - "deprecated": false, - "description": "Convert bytes to a human readable string: 1337 → 1.34 kB", - "devDependencies": { - "ava": "^1.4.1", - "full-icu": "^1.2.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - }, - "engines": { - "node": ">=6" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "homepage": "https://github.com/sindresorhus/pretty-bytes#readme", - "keywords": [ - "pretty", - "bytes", - "byte", - "filesize", - "size", - "file", - "human", - "humanized", - "readable", - "si", - "data", - "locale", - "localization", - "localized" - ], - "license": "MIT", - "name": "pretty-bytes", - "repository": { - "type": "git", - "url": "git+https://github.com/sindresorhus/pretty-bytes.git" - }, - "scripts": { - "test": "xo && NODE_ICU_DATA=node_modules/full-icu ava && tsd" - }, - "version": "5.3.0" -} diff --git a/node_modules/pretty-bytes/readme.md b/node_modules/pretty-bytes/readme.md deleted file mode 100644 index 0f77498..0000000 --- a/node_modules/pretty-bytes/readme.md +++ /dev/null @@ -1,89 +0,0 @@ -# pretty-bytes [![Build Status](https://travis-ci.org/sindresorhus/pretty-bytes.svg?branch=master)](https://travis-ci.org/sindresorhus/pretty-bytes) - -> Convert bytes to a human readable string: `1337` → `1.34 kB` - -Useful for displaying file sizes for humans. - -*Note that it uses base-10 (e.g. kilobyte). -[Read about the difference between kilobyte and kibibyte.](https://web.archive.org/web/20150324153922/https://pacoup.com/2009/05/26/kb-kb-kib-whats-up-with-that/)* - - -## Install - -``` -$ npm install pretty-bytes -``` - - -## Usage - -```js -const prettyBytes = require('pretty-bytes'); - -prettyBytes(1337); -//=> '1.34 kB' - -prettyBytes(100); -//=> '100 B' - -// Display with units of bits -prettyBytes(1337, {bits: true}); -//=> '1.34 kbit' - -// Display file size differences -prettyBytes(42, {signed: true}); -//=> '+42 B' - -// Localized output using German locale -prettyBytes(1337, {locale: 'de'}); -//=> '1,34 kB' -``` - - -## API - -### prettyBytes(number, [options]) - -#### number - -Type: `number` - -The number to format. - -#### options - -Type: `object` - -##### signed - -Type: `boolean`
-Default: `false` - -Include plus sign for positive numbers. If the difference is exactly zero a space character will be prepended instead for better alignment. - -##### bits - -Type: `boolean`
-Default: `false` - -Format the number as [bits](https://en.wikipedia.org/wiki/Bit) instead of [bytes](https://en.wikipedia.org/wiki/Byte). This can be useful when, for example, referring to [bit rate](https://en.wikipedia.org/wiki/Bit_rate). - -##### locale - -Type: `boolean` `string`
-Default: `false` *(No localization)* - -- If `true`: Localize the output using the system/browser locale. -- If `string`: Expects a [BCP 47 language tag](https://en.wikipedia.org/wiki/IETF_language_tag) (For example: `en`, `de`, …) - -**Note:** Localization should generally work in browsers. Node.js needs to be [built](https://github.com/nodejs/node/wiki/Intl) with `full-icu` or `system-icu`. Alternatively, the [`full-icu`](https://github.com/unicode-org/full-icu-npm) module can be used to provide support at runtime. - - -## Related - -- [pretty-bytes-cli](https://github.com/sindresorhus/pretty-bytes-cli) - CLI for this module - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/pump/.travis.yml b/node_modules/pump/.travis.yml deleted file mode 100644 index 17f9433..0000000 --- a/node_modules/pump/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: node_js -node_js: - - "0.10" - -script: "npm test" diff --git a/node_modules/pump/LICENSE b/node_modules/pump/LICENSE deleted file mode 100644 index 757562e..0000000 --- a/node_modules/pump/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Mathias Buus - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/pump/README.md b/node_modules/pump/README.md deleted file mode 100644 index 4c81471..0000000 --- a/node_modules/pump/README.md +++ /dev/null @@ -1,65 +0,0 @@ -# pump - -pump is a small node module that pipes streams together and destroys all of them if one of them closes. - -``` -npm install pump -``` - -[![build status](http://img.shields.io/travis/mafintosh/pump.svg?style=flat)](http://travis-ci.org/mafintosh/pump) - -## What problem does it solve? - -When using standard `source.pipe(dest)` source will _not_ be destroyed if dest emits close or an error. -You are also not able to provide a callback to tell when then pipe has finished. 
- -pump does these two things for you - -## Usage - -Simply pass the streams you want to pipe together to pump and add an optional callback - -``` js -var pump = require('pump') -var fs = require('fs') - -var source = fs.createReadStream('/dev/random') -var dest = fs.createWriteStream('/dev/null') - -pump(source, dest, function(err) { - console.log('pipe finished', err) -}) - -setTimeout(function() { - dest.destroy() // when dest is closed pump will destroy source -}, 1000) -``` - -You can use pump to pipe more than two streams together as well - -``` js -var transform = someTransformStream() - -pump(source, transform, anotherTransform, dest, function(err) { - console.log('pipe finished', err) -}) -``` - -If `source`, `transform`, `anotherTransform` or `dest` closes all of them will be destroyed. - -Similarly to `stream.pipe()`, `pump()` returns the last stream passed in, so you can do: - -``` -return pump(s1, s2) // returns s2 -``` - -If you want to return a stream that combines *both* s1 and s2 to a single stream use -[pumpify](https://github.com/mafintosh/pumpify) instead. - -## License - -MIT - -## Related - -`pump` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. diff --git a/node_modules/pump/index.js b/node_modules/pump/index.js deleted file mode 100644 index c15059f..0000000 --- a/node_modules/pump/index.js +++ /dev/null @@ -1,82 +0,0 @@ -var once = require('once') -var eos = require('end-of-stream') -var fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes - -var noop = function () {} -var ancient = /^v?\.0/.test(process.version) - -var isFn = function (fn) { - return typeof fn === 'function' -} - -var isFS = function (stream) { - if (!ancient) return false // newer node version do not need to care about fs is a special way - if (!fs) return false // browser - return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) -} - -var isRequest = function (stream) { - return stream.setHeader && isFn(stream.abort) -} - -var destroyer = function (stream, reading, writing, callback) { - callback = once(callback) - - var closed = false - stream.on('close', function () { - closed = true - }) - - eos(stream, {readable: reading, writable: writing}, function (err) { - if (err) return callback(err) - closed = true - callback() - }) - - var destroyed = false - return function (err) { - if (closed) return - if (destroyed) return - destroyed = true - - if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks - if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want - - if (isFn(stream.destroy)) return stream.destroy() - - callback(err || new Error('stream was destroyed')) - } -} - -var call = function (fn) { - fn() -} - -var pipe = function (from, to) { - return from.pipe(to) -} - -var pump = function () { - var streams = Array.prototype.slice.call(arguments) - var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop - - if (Array.isArray(streams[0])) streams = streams[0] - if (streams.length < 2) throw new Error('pump requires two streams per minimum') - - var error - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1 - var writing = i > 0 - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err - if (err) destroys.forEach(call) 
- if (reading) return - destroys.forEach(call) - callback(error) - }) - }) - - return streams.reduce(pipe) -} - -module.exports = pump diff --git a/node_modules/pump/package.json b/node_modules/pump/package.json deleted file mode 100644 index 696ccac..0000000 --- a/node_modules/pump/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "_from": "pump@^3.0.0", - "_id": "pump@3.0.0", - "_inBundle": false, - "_integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "_location": "/pump", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pump@^3.0.0", - "name": "pump", - "escapedName": "pump", - "rawSpec": "^3.0.0", - "saveSpec": null, - "fetchSpec": "^3.0.0" - }, - "_requiredBy": [ - "/async-neocities", - "/pumpify" - ], - "_resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "_shasum": "b4a2116815bde2f4e1ea602354e8c75565107a64", - "_spec": "pump@^3.0.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "Mathias Buus Madsen", - "email": "mathiasbuus@gmail.com" - }, - "browser": { - "fs": false - }, - "bugs": { - "url": "https://github.com/mafintosh/pump/issues" - }, - "bundleDependencies": false, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - }, - "deprecated": false, - "description": "pipe streams together and close all of them if one of them closes", - "homepage": "https://github.com/mafintosh/pump#readme", - "keywords": [ - "streams", - "pipe", - "destroy", - "callback" - ], - "license": "MIT", - "name": "pump", - "repository": { - "type": "git", - "url": "git://github.com/mafintosh/pump.git" - }, - "scripts": { - "test": "node test-browser.js && node test-node.js" - }, - "version": "3.0.0" -} diff --git a/node_modules/pump/test-browser.js b/node_modules/pump/test-browser.js deleted file mode 100644 index 9a06c8a..0000000 --- a/node_modules/pump/test-browser.js +++ /dev/null @@ -1,66 +0,0 @@ -var stream = require('stream') -var pump = require('./index') - -var rs = new stream.Readable() -var ws = new stream.Writable() - -rs._read = function (size) { - this.push(Buffer(size).fill('abc')) -} - -ws._write = function (chunk, encoding, cb) { - setTimeout(function () { - cb() - }, 100) -} - -var toHex = function () { - var reverse = new (require('stream').Transform)() - - reverse._transform = function (chunk, enc, callback) { - reverse.push(chunk.toString('hex')) - callback() - } - - return reverse -} - -var wsClosed = false -var rsClosed = false -var callbackCalled = false - -var check = function () { - if (wsClosed && rsClosed && callbackCalled) { - console.log('test-browser.js passes') - clearTimeout(timeout) - } -} - -ws.on('finish', function () { - wsClosed = true - check() -}) - -rs.on('end', function () { - rsClosed = true - check() -}) - -var res = pump(rs, toHex(), toHex(), toHex(), ws, function () { - callbackCalled = true - check() -}) - -if (res !== ws) { - throw new Error('should return last stream') -} - -setTimeout(function () { - rs.push(null) - rs.emit('close') -}, 1000) - -var timeout = setTimeout(function () { - check() - throw new Error('timeout') -}, 5000) diff --git a/node_modules/pump/test-node.js b/node_modules/pump/test-node.js deleted file mode 100644 index 561251a..0000000 --- a/node_modules/pump/test-node.js +++ /dev/null @@ -1,53 +0,0 @@ -var pump = require('./index') - -var rs = require('fs').createReadStream('/dev/random') -var ws = require('fs').createWriteStream('/dev/null') - 
-var toHex = function () { - var reverse = new (require('stream').Transform)() - - reverse._transform = function (chunk, enc, callback) { - reverse.push(chunk.toString('hex')) - callback() - } - - return reverse -} - -var wsClosed = false -var rsClosed = false -var callbackCalled = false - -var check = function () { - if (wsClosed && rsClosed && callbackCalled) { - console.log('test-node.js passes') - clearTimeout(timeout) - } -} - -ws.on('close', function () { - wsClosed = true - check() -}) - -rs.on('close', function () { - rsClosed = true - check() -}) - -var res = pump(rs, toHex(), toHex(), toHex(), ws, function () { - callbackCalled = true - check() -}) - -if (res !== ws) { - throw new Error('should return last stream') -} - -setTimeout(function () { - rs.destroy() -}, 1000) - -var timeout = setTimeout(function () { - throw new Error('timeout') -}, 5000) diff --git a/node_modules/pumpify/.travis.yml b/node_modules/pumpify/.travis.yml deleted file mode 100644 index d385d9e..0000000 --- a/node_modules/pumpify/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: node_js - -node_js: - - '10' - - '12' - -sudo: false diff --git a/node_modules/pumpify/LICENSE b/node_modules/pumpify/LICENSE deleted file mode 100644 index 757562e..0000000 --- a/node_modules/pumpify/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Mathias Buus - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/pumpify/README.md b/node_modules/pumpify/README.md deleted file mode 100644 index 4988f7b..0000000 --- a/node_modules/pumpify/README.md +++ /dev/null @@ -1,56 +0,0 @@ -# pumpify - -Combine an array of streams into a single duplex stream using [pump](https://github.com/mafintosh/pump) and [duplexify](https://github.com/mafintosh/duplexify). -If one of the streams closes/errors all streams in the pipeline will be destroyed. - -``` -npm install pumpify -``` - -[![build status](http://img.shields.io/travis/mafintosh/pumpify.svg?style=flat)](http://travis-ci.org/mafintosh/pumpify) - -## Usage - -Pass the streams you want to pipe together to pumpify `pipeline = pumpify(s1, s2, s3, ...)`. -`pipeline` is a duplex stream that writes to the first streams and reads from the last one. -Streams are piped together using [pump](https://github.com/mafintosh/pump) so if one of them closes -all streams will be destroyed. 
- -``` js -var pumpify = require('pumpify') -var tar = require('tar-fs') -var zlib = require('zlib') -var fs = require('fs') - -var untar = pumpify(zlib.createGunzip(), tar.extract('output-folder')) -// you can also pass an array instead -// var untar = pumpify([zlib.createGunzip(), tar.extract('output-folder')]) - -fs.createReadStream('some-gzipped-tarball.tgz').pipe(untar) -``` - -If you are pumping object streams together use `pipeline = pumpify.obj(s1, s2, ...)`. -Call `pipeline.destroy()` to destroy the pipeline (including the streams passed to pumpify). - -### Using `setPipeline(s1, s2, ...)` - -Similar to [duplexify](https://github.com/mafintosh/duplexify) you can also define the pipeline asynchronously using `setPipeline(s1, s2, ...)` - -``` js -var untar = pumpify() - -setTimeout(function() { - // will start draining the input now - untar.setPipeline(zlib.createGunzip(), tar.extract('output-folder')) -}, 1000) - -fs.createReadStream('some-gzipped-tarball.tgz').pipe(untar) -``` - -## License - -MIT - -## Related - -`pumpify` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. diff --git a/node_modules/pumpify/index.js b/node_modules/pumpify/index.js deleted file mode 100644 index 473e256..0000000 --- a/node_modules/pumpify/index.js +++ /dev/null @@ -1,60 +0,0 @@ -var pump = require('pump') -var inherits = require('inherits') -var Duplexify = require('duplexify') - -var toArray = function(args) { - if (!args.length) return [] - return Array.isArray(args[0]) ? args[0] : Array.prototype.slice.call(args) -} - -var define = function(opts) { - var Pumpify = function() { - var streams = toArray(arguments) - if (!(this instanceof Pumpify)) return new Pumpify(streams) - Duplexify.call(this, null, null, opts) - if (streams.length) this.setPipeline(streams) - } - - inherits(Pumpify, Duplexify) - - Pumpify.prototype.setPipeline = function() { - var streams = toArray(arguments) - var self = this - var ended = false - var w = streams[0] - var r = streams[streams.length-1] - - r = r.readable ? r : null - w = w.writable ? w : null - - var onclose = function() { - streams[0].emit('error', new Error('stream was destroyed')) - } - - this.on('close', onclose) - this.on('prefinish', function() { - if (!ended) self.cork() - }) - - pump(streams, function(err) { - self.removeListener('close', onclose) - if (err) return self.destroy(err.message === 'premature close' ? 
null : err) - ended = true - // pump ends after the last stream is not writable *but* - // pumpify still forwards the readable part so we need to catch errors - // still, so reenable autoDestroy in this case - if (self._autoDestroy === false) self._autoDestroy = true - self.uncork() - }) - - if (this.destroyed) return onclose() - this.setWritable(w) - this.setReadable(r) - } - - return Pumpify -} - -module.exports = define({autoDestroy:false, destroy:false}) -module.exports.obj = define({autoDestroy: false, destroy:false, objectMode:true, highWaterMark:16}) -module.exports.ctor = define diff --git a/node_modules/pumpify/package.json b/node_modules/pumpify/package.json deleted file mode 100644 index 324e9d2..0000000 --- a/node_modules/pumpify/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "_from": "pumpify@^2.0.1", - "_id": "pumpify@2.0.1", - "_inBundle": false, - "_integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", - "_location": "/pumpify", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pumpify@^2.0.1", - "name": "pumpify", - "escapedName": "pumpify", - "rawSpec": "^2.0.1", - "saveSpec": null, - "fetchSpec": "^2.0.1" - }, - "_requiredBy": [ - "/async-neocities" - ], - "_resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz", - "_shasum": "abfc7b5a621307c728b551decbbefb51f0e4aa1e", - "_spec": "pumpify@^2.0.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "Mathias Buus" - }, - "bugs": { - "url": "https://github.com/mafintosh/pumpify/issues" - }, - "bundleDependencies": false, - "dependencies": { - "duplexify": "^4.1.1", - "inherits": "^2.0.3", - "pump": "^3.0.0" - }, - "deprecated": false, - "description": "Combine an array of streams into a single duplex stream using pump and duplexify", - "devDependencies": { - "tape": "^4.10.2", - "through2": "^3.0.1" - }, - "homepage": "https://github.com/mafintosh/pumpify", - "keywords": [ - "pump", - "duplexify", - "duplex", - "streams", - "stream", - "pipeline", - "combine" - ], - "license": "MIT", - "main": "index.js", - "name": "pumpify", - "repository": { - "type": "git", - "url": "git://github.com/mafintosh/pumpify.git" - }, - "scripts": { - "test": "tape test.js" - }, - "version": "2.0.1" -} diff --git a/node_modules/pumpify/test.js b/node_modules/pumpify/test.js deleted file mode 100644 index d93d7ed..0000000 --- a/node_modules/pumpify/test.js +++ /dev/null @@ -1,238 +0,0 @@ -var tape = require('tape') -var through = require('through2') -var pumpify = require('./') -var stream = require('stream') -var duplexify = require('duplexify') - -tape('basic', function(t) { - t.plan(3) - - var pipeline = pumpify( - through(function(data, enc, cb) { - t.same(data.toString(), 'hello') - cb(null, data.toString().toUpperCase()) - }), - through(function(data, enc, cb) { - t.same(data.toString(), 'HELLO') - cb(null, data.toString().toLowerCase()) - }) - ) - - pipeline.write('hello') - pipeline.on('data', function(data) { - t.same(data.toString(), 'hello') - t.end() - }) -}) - -tape('3 times', function(t) { - t.plan(4) - - var pipeline = pumpify( - through(function(data, enc, cb) { - t.same(data.toString(), 'hello') - cb(null, data.toString().toUpperCase()) - }), - through(function(data, enc, cb) { - t.same(data.toString(), 'HELLO') - cb(null, data.toString().toLowerCase()) - }), - through(function(data, enc, cb) { - t.same(data.toString(), 'hello') - cb(null, 
data.toString().toUpperCase()) - }) - ) - - pipeline.write('hello') - pipeline.on('data', function(data) { - t.same(data.toString(), 'HELLO') - t.end() - }) -}) - -tape('destroy', function(t) { - var test = through() - test.destroy = function() { - t.ok(true) - t.end() - } - - var pipeline = pumpify(through(), test) - - pipeline.destroy() -}) - -tape('close', function(t) { - var test = through() - var pipeline = pumpify(through(), test) - - pipeline.on('error', function(err) { - t.same(err.message, 'lol') - t.end() - }) - - test.emit('error', new Error('lol')) -}) - -tape('end waits for last one', function(t) { - var ran = false - - var a = through() - var b = through() - var c = through(function(data, enc, cb) { - setTimeout(function() { - ran = true - cb() - }, 100) - }) - - var pipeline = pumpify(a, b, c) - - pipeline.write('foo') - pipeline.end(function() { - t.ok(ran) - t.end() - }) - - t.ok(!ran) -}) - -tape('always wait for finish', function(t) { - var a = new stream.Readable() - a._read = function() {} - a.push('hello') - - var pipeline = pumpify(a, through(), through()) - var ran = false - - pipeline.on('finish', function() { - t.ok(ran) - t.end() - }) - - setTimeout(function() { - ran = true - a.push(null) - }, 100) -}) - -tape('async', function(t) { - var pipeline = pumpify() - - t.plan(4) - - pipeline.write('hello') - pipeline.on('data', function(data) { - t.same(data.toString(), 'HELLO') - t.end() - }) - - setTimeout(function() { - pipeline.setPipeline( - through(function(data, enc, cb) { - t.same(data.toString(), 'hello') - cb(null, data.toString().toUpperCase()) - }), - through(function(data, enc, cb) { - t.same(data.toString(), 'HELLO') - cb(null, data.toString().toLowerCase()) - }), - through(function(data, enc, cb) { - t.same(data.toString(), 'hello') - cb(null, data.toString().toUpperCase()) - }) - ) - }, 100) -}) - -tape('early destroy', function(t) { - var a = through() - var b = through() - var c = through() - - b.destroy = function() { - t.ok(true) - t.end() - } - - var pipeline = pumpify() - - pipeline.destroy() - setTimeout(function() { - pipeline.setPipeline(a, b, c) - }, 100) -}) - -tape('preserves error', function (t) { - var a = through() - var b = through(function (data, enc, cb) { - cb(new Error('stop')) - }) - var c = through() - var s = pumpify() - - s.on('error', function (err) { - t.same(err.message, 'stop') - t.end() - }) - - s.setPipeline(a, b, c) - s.resume() - s.write('hi') -}) - -tape('preserves error again', function (t) { - var ws = new stream.Writable() - var rs = new stream.Readable({highWaterMark: 16}) - - ws._write = function (data, enc, cb) { - cb(null) - } - - var once = true - rs._read = function () { - process.nextTick(function () { - if (!once) return - once = false - rs.push('hello world') - }) - } - - var pumpifyErr = pumpify( - through(), - through(function(chunk, _, cb) { - cb(new Error('test')) - }), - ws - ) - - rs.pipe(pumpifyErr) - .on('error', function (err) { - t.ok(err) - t.ok(err.message !== 'premature close', 'does not close with premature close') - t.end() - }) -}) - -tape('returns error from duplexify', function (t) { - var a = through() - var b = duplexify() - var s = pumpify() - - s.setPipeline(a, b) - - s.on('error', function (err) { - t.same(err.message, 'stop') - t.end() - }) - - s.write('data') - // Test passes if `.end()` is not called - s.end() - - b.setWritable(through()) - - setImmediate(function () { - b.destroy(new Error('stop')) - }) -}) diff --git a/node_modules/qs/.editorconfig b/node_modules/qs/.editorconfig 
deleted file mode 100644 index b442a9f..0000000 --- a/node_modules/qs/.editorconfig +++ /dev/null @@ -1,33 +0,0 @@ -root = true - -[*] -indent_style = space -indent_size = 4 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true -max_line_length = 160 - -[test/*] -max_line_length = off - -[LICENSE.md] -indent_size = off - -[*.md] -max_line_length = off - -[*.json] -max_line_length = off - -[Makefile] -max_line_length = off - -[CHANGELOG.md] -indent_style = space -indent_size = 2 - -[LICENSE] -indent_size = 2 -max_line_length = off diff --git a/node_modules/qs/.eslintignore b/node_modules/qs/.eslintignore deleted file mode 100644 index 1521c8b..0000000 --- a/node_modules/qs/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -dist diff --git a/node_modules/qs/.eslintrc b/node_modules/qs/.eslintrc deleted file mode 100644 index e3bde89..0000000 --- a/node_modules/qs/.eslintrc +++ /dev/null @@ -1,21 +0,0 @@ -{ - "root": true, - - "extends": "@ljharb", - - "rules": { - "complexity": 0, - "consistent-return": 1, - "func-name-matching": 0, - "id-length": [2, { "min": 1, "max": 25, "properties": "never" }], - "indent": [2, 4], - "max-lines-per-function": [2, { "max": 150 }], - "max-params": [2, 14], - "max-statements": [2, 52], - "multiline-comment-style": 0, - "no-continue": 1, - "no-magic-numbers": 0, - "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"], - "operator-linebreak": [2, "before"], - } -} diff --git a/node_modules/qs/.github/FUNDING.yml b/node_modules/qs/.github/FUNDING.yml deleted file mode 100644 index 0355f4f..0000000 --- a/node_modules/qs/.github/FUNDING.yml +++ /dev/null @@ -1,12 +0,0 @@ -# These are supported funding model platforms - -github: [ljharb] -patreon: # Replace with a single Patreon username -open_collective: # Replace with a single Open Collective username -ko_fi: # Replace with a single Ko-fi username -tidelift: npm/qs -community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -liberapay: # Replace with a single Liberapay username -issuehunt: # Replace with a single IssueHunt username -otechie: # Replace with a single Otechie username -custom: # Replace with a single custom sponsorship URL diff --git a/node_modules/qs/CHANGELOG.md b/node_modules/qs/CHANGELOG.md deleted file mode 100644 index 5591d20..0000000 --- a/node_modules/qs/CHANGELOG.md +++ /dev/null @@ -1,285 +0,0 @@ -## **6.9.1** -- [Fix] `parse`: with comma true, handle field that holds an array of arrays (#335) -- [Fix] `parse`: with comma true, do not split non-string values (#334) -- [meta] add `funding` field -- [Dev Deps] update `eslint`, `@ljharb/eslint-config` -- [Tests] use shared travis-ci config - -## **6.9.0** -- [New] `parse`/`stringify`: Pass extra key/value argument to `decoder` (#333) -- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `evalmd` -- [Tests] `parse`: add passing `arrayFormat` tests -- [Tests] add `posttest` using `npx aud` to run `npm audit` without a lockfile -- [Tests] up to `node` `v12.10`, `v11.15`, `v10.16`, `v8.16` -- [Tests] `Buffer.from` in node v5.0-v5.9 and v4.0-v4.4 requires a TypedArray - -## **6.8.0** -- [New] add `depth=false` to preserve the original key; [Fix] `depth=0` should preserve the original key (#326) -- [New] [Fix] stringify symbols and bigints -- [Fix] ensure node 0.12 can stringify Symbols -- [Fix] fix for an impossible situation: when the formatter is called with a non-string value -- [Refactor] `formats`: tiny 
bit of cleanup. -- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `safe-publish-latest`, `iconv-lite`, `tape` -- [Tests] add tests for `depth=0` and `depth=false` behavior, both current and intuitive/intended (#326) -- [Tests] use `eclint` instead of `editorconfig-tools` -- [docs] readme: add security note -- [meta] add github sponsorship -- [meta] add FUNDING.yml -- [meta] Clean up license text so it’s properly detected as BSD-3-Clause - -## **6.7.0** -- [New] `stringify`/`parse`: add `comma` as an `arrayFormat` option (#276, #219) -- [Fix] correctly parse nested arrays (#212) -- [Fix] `utils.merge`: avoid a crash with a null target and a truthy non-array source, also with an array source -- [Robustness] `stringify`: cache `Object.prototype.hasOwnProperty` -- [Refactor] `utils`: `isBuffer`: small tweak; add tests -- [Refactor] use cached `Array.isArray` -- [Refactor] `parse`/`stringify`: make a function to normalize the options -- [Refactor] `utils`: reduce observable [[Get]]s -- [Refactor] `stringify`/`utils`: cache `Array.isArray` -- [Tests] always use `String(x)` over `x.toString()` -- [Tests] fix Buffer tests to work in node < 4.5 and node < 5.10 -- [Tests] temporarily allow coverage to fail - -## **6.6.0** -- [New] Add support for iso-8859-1, utf8 "sentinel" and numeric entities (#268) -- [New] move two-value combine to a `utils` function (#189) -- [Fix] `stringify`: fix a crash with `strictNullHandling` and a custom `filter`/`serializeDate` (#279) -- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` (#260) -- [Fix] `stringify`: do not crash in an obscure combo of `interpretNumericEntities`, a bad custom `decoder`, & `iso-8859-1` -- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided -- [refactor] `stringify`: Avoid arr = arr.concat(...), push to the existing instance (#269) -- [Refactor] `parse`: only need to reassign the var once -- [Refactor] `parse`/`stringify`: clean up `charset` options checking; fix defaults -- [Refactor] add missing defaults -- [Refactor] `parse`: one less `concat` call -- [Refactor] `utils`: `compactQueue`: make it explicitly side-effecting -- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`, `iconv-lite`, `safe-publish-latest`, `tape` -- [Tests] up to `node` `v10.10`, `v9.11`, `v8.12`, `v6.14`, `v4.9`; pin included builds to LTS - -## **6.5.2** -- [Fix] use `safer-buffer` instead of `Buffer` constructor -- [Refactor] utils: `module.exports` one thing, instead of mutating `exports` (#230) -- [Dev Deps] update `browserify`, `eslint`, `iconv-lite`, `safer-buffer`, `tape`, `browserify` - -## **6.5.1** -- [Fix] Fix parsing & compacting very deep objects (#224) -- [Refactor] name utils functions -- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` -- [Tests] up to `node` `v8.4`; use `nvm install-latest-npm` so newer npm doesn’t break older node -- [Tests] Use precise dist for Node.js 0.6 runtime (#225) -- [Tests] make 0.6 required, now that it’s passing -- [Tests] on `node` `v8.2`; fix npm on node 0.6 - -## **6.5.0** -- [New] add `utils.assign` -- [New] pass default encoder/decoder to custom encoder/decoder functions (#206) -- [New] `parse`/`stringify`: add `ignoreQueryPrefix`/`addQueryPrefix` options, respectively (#213) -- [Fix] Handle stringifying empty objects with addQueryPrefix (#217) -- [Fix] do not mutate `options` argument (#207) -- [Refactor] `parse`: cache index to reuse in else statement (#182) -- [Docs] add various badges to readme (#208) 
-- [Dev Deps] update `eslint`, `browserify`, `iconv-lite`, `tape` -- [Tests] up to `node` `v8.1`, `v7.10`, `v6.11`; npm v4.6 breaks on node < v1; npm v5+ breaks on node < v4 -- [Tests] add `editorconfig-tools` - -## **6.4.0** -- [New] `qs.stringify`: add `encodeValuesOnly` option -- [Fix] follow `allowPrototypes` option during merge (#201, #201) -- [Fix] support keys starting with brackets (#202, #200) -- [Fix] chmod a-x -- [Dev Deps] update `eslint` -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds -- [eslint] reduce warnings - -## **6.3.2** -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Dev Deps] update `eslint` -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - -## **6.3.1** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties (thanks, @snyk!) -- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `iconv-lite`, `qs-iconv`, `tape` -- [Tests] on all node minors; improve test matrix -- [Docs] document stringify option `allowDots` (#195) -- [Docs] add empty object and array values example (#195) -- [Docs] Fix minor inconsistency/typo (#192) -- [Docs] document stringify option `sort` (#191) -- [Refactor] `stringify`: throw faster with an invalid encoder -- [Refactor] remove unnecessary escapes (#184) -- Remove contributing.md, since `qs` is no longer part of `hapi` (#183) - -## **6.3.0** -- [New] Add support for RFC 1738 (#174, #173) -- [New] `stringify`: Add `serializeDate` option to customize Date serialization (#159) -- [Fix] ensure `utils.merge` handles merging two arrays -- [Refactor] only constructors should be capitalized -- [Refactor] capitalized var names are for constructors only -- [Refactor] avoid using a sparse array -- [Robustness] `formats`: cache `String#replace` -- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`; add `safe-publish-latest` -- [Tests] up to `node` `v6.8`, `v4.6`; improve test matrix -- [Tests] flesh out arrayLimit/arrayFormat tests (#107) -- [Tests] skip Object.create tests when null objects are not available -- [Tests] Turn on eslint for test files (#175) - -## **6.2.3** -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - -## **6.2.2** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties - -## **6.2.1** -- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values -- [Refactor] Be explicit and use `Object.prototype.hasOwnProperty.call` -- [Tests] remove `parallelshell` since it does not reliably report failures -- [Tests] up to `node` `v6.3`, `v5.12` -- [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `qs-iconv` - -## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) -- [New] pass Buffers to the encoder/decoder directly (#161) -- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) -- [Fix] fix compacting of nested sparse arrays (#150) - -## **6.1.2 -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - 
-## **6.1.1** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties - -## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) -- [New] allowDots option for `stringify` (#151) -- [Fix] "sort" option should work at a depth of 3 or more (#151) -- [Fix] Restore `dist` directory; will be removed in v7 (#148) - -## **6.0.4** -- [Fix] follow `allowPrototypes` option during merge (#201, #200) -- [Fix] chmod a-x -- [Fix] support keys starting with brackets (#202, #200) -- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds - -## **6.0.3** -- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties -- [Fix] Restore `dist` directory; will be removed in v7 (#148) - -## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) -- Revert ES6 requirement and restore support for node down to v0.8. - -## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) -- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json - -## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) -- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 - -## **5.2.1** -- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values - -## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) -- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string - -## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) -- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional -- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify - -## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) -- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false -- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm - -## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) -- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional - -## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) -- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" - -## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) -- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties -- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost -- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing -- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object -- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option -- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. 
-- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 -- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 -- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign -- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute - -## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) -- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object # is not a function - -## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) -- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option - -## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) -- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 -- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader - -## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) -- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object - -## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) -- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". - -## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) -- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 - -## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) -- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? -- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 -- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 - -## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) -- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number - -## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) -- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array -- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x - -## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) -- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value -- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty -- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? - -## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) -- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 -- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects - -## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) -- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present -- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays -- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge -- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? 
- -## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) -- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter - -## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) -- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? -- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit -- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 - -## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) -- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values - -## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) -- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters -- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block - -## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) -- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument -- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed - -## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) -- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted -- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null -- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README - -## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) -- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/qs/LICENSE.md b/node_modules/qs/LICENSE.md deleted file mode 100644 index fecf6b6..0000000 --- a/node_modules/qs/LICENSE.md +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2014, Nathan LaFreniere and other [contributors](https://github.com/ljharb/qs/graphs/contributors) -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/qs/README.md b/node_modules/qs/README.md deleted file mode 100644 index 8592a4c..0000000 --- a/node_modules/qs/README.md +++ /dev/null @@ -1,598 +0,0 @@ -# qs [![Version Badge][2]][1] - -[![Build Status][3]][4] -[![dependency status][5]][6] -[![dev dependency status][7]][8] -[![License][license-image]][license-url] -[![Downloads][downloads-image]][downloads-url] - -[![npm badge][11]][1] - -A querystring parsing and stringifying library with some added security. - -Lead Maintainer: [Jordan Harband](https://github.com/ljharb) - -The **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring). - -## Usage - -```javascript -var qs = require('qs'); -var assert = require('assert'); - -var obj = qs.parse('a=c'); -assert.deepEqual(obj, { a: 'c' }); - -var str = qs.stringify(obj); -assert.equal(str, 'a=c'); -``` - -### Parsing Objects - -[](#preventEval) -```javascript -qs.parse(string, [options]); -``` - -**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`. -For example, the string `'foo[bar]=baz'` converts to: - -```javascript -assert.deepEqual(qs.parse('foo[bar]=baz'), { - foo: { - bar: 'baz' - } -}); -``` - -When using the `plainObjects` option the parsed value is returned as a null object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like: - -```javascript -var nullObject = qs.parse('a[hasOwnProperty]=b', { plainObjects: true }); -assert.deepEqual(nullObject, { a: { hasOwnProperty: 'b' } }); -``` - -By default parameters that would overwrite properties on the object prototype are ignored, if you wish to keep the data from those fields either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true` which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option. - -```javascript -var protoObject = qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }); -assert.deepEqual(protoObject, { a: { hasOwnProperty: 'b' } }); -``` - -URI encoded strings work too: - -```javascript -assert.deepEqual(qs.parse('a%5Bb%5D=c'), { - a: { b: 'c' } -}); -``` - -You can also nest your objects, like `'foo[bar][baz]=foobarbaz'`: - -```javascript -assert.deepEqual(qs.parse('foo[bar][baz]=foobarbaz'), { - foo: { - bar: { - baz: 'foobarbaz' - } - } -}); -``` - -By default, when nesting objects **qs** will only parse up to 5 children deep. This means if you attempt to parse a string like -`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be: - -```javascript -var expected = { - a: { - b: { - c: { - d: { - e: { - f: { - '[g][h][i]': 'j' - } - } - } - } - } - } -}; -var string = 'a[b][c][d][e][f][g][h][i]=j'; -assert.deepEqual(qs.parse(string), expected); -``` - -This depth can be overridden by passing a `depth` option to `qs.parse(string, [options])`: - -```javascript -var deep = qs.parse('a[b][c][d][e][f][g][h][i]=j', { depth: 1 }); -assert.deepEqual(deep, { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } }); -``` - -The depth limit helps mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number. - -For similar reasons, by default **qs** will only parse up to 1000 parameters. 
This can be overridden by passing a `parameterLimit` option: - -```javascript -var limited = qs.parse('a=b&c=d', { parameterLimit: 1 }); -assert.deepEqual(limited, { a: 'b' }); -``` - -To bypass the leading question mark, use `ignoreQueryPrefix`: - -```javascript -var prefixed = qs.parse('?a=b&c=d', { ignoreQueryPrefix: true }); -assert.deepEqual(prefixed, { a: 'b', c: 'd' }); -``` - -An optional delimiter can also be passed: - -```javascript -var delimited = qs.parse('a=b;c=d', { delimiter: ';' }); -assert.deepEqual(delimited, { a: 'b', c: 'd' }); -``` - -Delimiters can be a regular expression too: - -```javascript -var regexed = qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ }); -assert.deepEqual(regexed, { a: 'b', c: 'd', e: 'f' }); -``` - -Option `allowDots` can be used to enable dot notation: - -```javascript -var withDots = qs.parse('a.b=c', { allowDots: true }); -assert.deepEqual(withDots, { a: { b: 'c' } }); -``` - -If you have to deal with legacy browsers or services, there's -also support for decoding percent-encoded octets as iso-8859-1: - -```javascript -var oldCharset = qs.parse('a=%A7', { charset: 'iso-8859-1' }); -assert.deepEqual(oldCharset, { a: '§' }); -``` - -Some services add an initial `utf8=✓` value to forms so that old -Internet Explorer versions are more likely to submit the form as -utf-8. Additionally, the server can check the value against wrong -encodings of the checkmark character and detect that a query string -or `application/x-www-form-urlencoded` body was *not* sent as -utf-8, eg. if the form had an `accept-charset` parameter or the -containing page had a different character set. - -**qs** supports this mechanism via the `charsetSentinel` option. -If specified, the `utf8` parameter will be omitted from the -returned object. It will be used to switch to `iso-8859-1`/`utf-8` -mode depending on how the checkmark is encoded. - -**Important**: When you specify both the `charset` option and the -`charsetSentinel` option, the `charset` will be overridden when -the request contains a `utf8` parameter from which the actual -charset can be deduced. In that sense the `charset` will behave -as the default charset rather than the authoritative charset. - -```javascript -var detectedAsUtf8 = qs.parse('utf8=%E2%9C%93&a=%C3%B8', { - charset: 'iso-8859-1', - charsetSentinel: true -}); -assert.deepEqual(detectedAsUtf8, { a: 'ø' }); - -// Browsers encode the checkmark as ✓ when submitting as iso-8859-1: -var detectedAsIso8859_1 = qs.parse('utf8=%26%2310003%3B&a=%F8', { - charset: 'utf-8', - charsetSentinel: true -}); -assert.deepEqual(detectedAsIso8859_1, { a: 'ø' }); -``` - -If you want to decode the `&#...;` syntax to the actual character, -you can specify the `interpretNumericEntities` option as well: - -```javascript -var detectedAsIso8859_1 = qs.parse('a=%26%239786%3B', { - charset: 'iso-8859-1', - interpretNumericEntities: true -}); -assert.deepEqual(detectedAsIso8859_1, { a: '☺' }); -``` - -It also works when the charset has been detected in `charsetSentinel` -mode. - -### Parsing Arrays - -**qs** can also parse arrays using a similar `[]` notation: - -```javascript -var withArray = qs.parse('a[]=b&a[]=c'); -assert.deepEqual(withArray, { a: ['b', 'c'] }); -``` - -You may specify an index as well: - -```javascript -var withIndexes = qs.parse('a[1]=c&a[0]=b'); -assert.deepEqual(withIndexes, { a: ['b', 'c'] }); -``` - -Note that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number -to create an array. 
When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving -their order: - -```javascript -var noSparse = qs.parse('a[1]=b&a[15]=c'); -assert.deepEqual(noSparse, { a: ['b', 'c'] }); -``` - -Note that an empty string is also a value, and will be preserved: - -```javascript -var withEmptyString = qs.parse('a[]=&a[]=b'); -assert.deepEqual(withEmptyString, { a: ['', 'b'] }); - -var withIndexedEmptyString = qs.parse('a[0]=b&a[1]=&a[2]=c'); -assert.deepEqual(withIndexedEmptyString, { a: ['b', '', 'c'] }); -``` - -**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will -instead be converted to an object with the index as the key. This is needed to handle cases when someone sent, for example, `a[999999999]` and it will take significant time to iterate over this huge array. - -```javascript -var withMaxIndex = qs.parse('a[100]=b'); -assert.deepEqual(withMaxIndex, { a: { '100': 'b' } }); -``` - -This limit can be overridden by passing an `arrayLimit` option: - -```javascript -var withArrayLimit = qs.parse('a[1]=b', { arrayLimit: 0 }); -assert.deepEqual(withArrayLimit, { a: { '1': 'b' } }); -``` - -To disable array parsing entirely, set `parseArrays` to `false`. - -```javascript -var noParsingArrays = qs.parse('a[]=b', { parseArrays: false }); -assert.deepEqual(noParsingArrays, { a: { '0': 'b' } }); -``` - -If you mix notations, **qs** will merge the two items into an object: - -```javascript -var mixedNotation = qs.parse('a[0]=b&a[b]=c'); -assert.deepEqual(mixedNotation, { a: { '0': 'b', b: 'c' } }); -``` - -You can also create arrays of objects: - -```javascript -var arraysOfObjects = qs.parse('a[][b]=c'); -assert.deepEqual(arraysOfObjects, { a: [{ b: 'c' }] }); -``` - -Some people use comma to join array, **qs** can parse it: -```javascript -var arraysOfObjects = qs.parse('a=b,c', { comma: true }) -assert.deepEqual(arraysOfObjects, { a: ['b', 'c'] }) -``` -(_this cannot convert nested objects, such as `a={b:1},{c:d}`_) - -### Stringifying - -[](#preventEval) -```javascript -qs.stringify(object, [options]); -``` - -When stringifying, **qs** by default URI encodes output. 
Objects are stringified as you would expect: - -```javascript -assert.equal(qs.stringify({ a: 'b' }), 'a=b'); -assert.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); -``` - -This encoding can be disabled by setting the `encode` option to `false`: - -```javascript -var unencoded = qs.stringify({ a: { b: 'c' } }, { encode: false }); -assert.equal(unencoded, 'a[b]=c'); -``` - -Encoding can be disabled for keys by setting the `encodeValuesOnly` option to `true`: -```javascript -var encodedValues = qs.stringify( - { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - { encodeValuesOnly: true } -); -assert.equal(encodedValues,'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h'); -``` - -This encoding can also be replaced by a custom encoding method set as `encoder` option: - -```javascript -var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str) { - // Passed in values `a`, `b`, `c` - return // Return encoded string -}}) -``` - -_(Note: the `encoder` option does not apply if `encode` is `false`)_ - -Analogue to the `encoder` there is a `decoder` option for `parse` to override decoding of properties and values: - -```javascript -var decoded = qs.parse('x=z', { decoder: function (str) { - // Passed in values `x`, `z` - return // Return decoded string -}}) -``` - -You can encode keys and values using different logic by using the type argument provided to the encoder: - -```javascript -var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str, defaultEncoder, charset, type) { - if (type === 'key') { - return // Encoded key - } else if (type === 'value') { - return // Encoded value - } -}}) -``` - -The type argument is also provided to the decoder: - -```javascript -var decoded = qs.parse('x=z', { decoder: function (str, defaultEncoder, charset, type) { - if (type === 'key') { - return // Decoded key - } else if (type === 'value') { - return // Decoded value - } -}}) -``` - -Examples beyond this point will be shown as though the output is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage. 
- -When arrays are stringified, by default they are given explicit indices: - -```javascript -qs.stringify({ a: ['b', 'c', 'd'] }); -// 'a[0]=b&a[1]=c&a[2]=d' -``` - -You may override this by setting the `indices` option to `false`: - -```javascript -qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }); -// 'a=b&a=c&a=d' -``` - -You may use the `arrayFormat` option to specify the format of the output array: - -```javascript -qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }) -// 'a[0]=b&a[1]=c' -qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }) -// 'a[]=b&a[]=c' -qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }) -// 'a=b&a=c' -qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'comma' }) -// 'a=b,c' -``` - -When objects are stringified, by default they use bracket notation: - -```javascript -qs.stringify({ a: { b: { c: 'd', e: 'f' } } }); -// 'a[b][c]=d&a[b][e]=f' -``` - -You may override this to use dot notation by setting the `allowDots` option to `true`: - -```javascript -qs.stringify({ a: { b: { c: 'd', e: 'f' } } }, { allowDots: true }); -// 'a.b.c=d&a.b.e=f' -``` - -Empty strings and null values will omit the value, but the equals sign (=) remains in place: - -```javascript -assert.equal(qs.stringify({ a: '' }), 'a='); -``` - -Key with no values (such as an empty object or array) will return nothing: - -```javascript -assert.equal(qs.stringify({ a: [] }), ''); -assert.equal(qs.stringify({ a: {} }), ''); -assert.equal(qs.stringify({ a: [{}] }), ''); -assert.equal(qs.stringify({ a: { b: []} }), ''); -assert.equal(qs.stringify({ a: { b: {}} }), ''); -``` - -Properties that are set to `undefined` will be omitted entirely: - -```javascript -assert.equal(qs.stringify({ a: null, b: undefined }), 'a='); -``` - -The query string may optionally be prepended with a question mark: - -```javascript -assert.equal(qs.stringify({ a: 'b', c: 'd' }, { addQueryPrefix: true }), '?a=b&c=d'); -``` - -The delimiter may be overridden with stringify as well: - -```javascript -assert.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); -``` - -If you only want to override the serialization of `Date` objects, you can provide a `serializeDate` option: - -```javascript -var date = new Date(7); -assert.equal(qs.stringify({ a: date }), 'a=1970-01-01T00:00:00.007Z'.replace(/:/g, '%3A')); -assert.equal( - qs.stringify({ a: date }, { serializeDate: function (d) { return d.getTime(); } }), - 'a=7' -); -``` - -You may use the `sort` option to affect the order of parameter keys: - -```javascript -function alphabeticalSort(a, b) { - return a.localeCompare(b); -} -assert.equal(qs.stringify({ a: 'c', z: 'y', b : 'f' }, { sort: alphabeticalSort }), 'a=c&b=f&z=y'); -``` - -Finally, you can use the `filter` option to restrict which keys will be included in the stringified output. -If you pass a function, it will be called for each key to obtain the replacement value. Otherwise, if you -pass an array, it will be used to select properties and array indices for stringification: - -```javascript -function filterFunc(prefix, value) { - if (prefix == 'b') { - // Return an `undefined` value to omit a property. 
- return;
- }
- if (prefix == 'e[f]') {
- return value.getTime();
- }
- if (prefix == 'e[g][0]') {
- return value * 2;
- }
- return value;
-}
-qs.stringify({ a: 'b', c: 'd', e: { f: new Date(123), g: [2] } }, { filter: filterFunc });
-// 'a=b&c=d&e[f]=123&e[g][0]=4'
-qs.stringify({ a: 'b', c: 'd', e: 'f' }, { filter: ['a', 'e'] });
-// 'a=b&e=f'
-qs.stringify({ a: ['b', 'c', 'd'], e: 'f' }, { filter: ['a', 0, 2] });
-// 'a[0]=b&a[2]=d'
-```
-
-### Handling of `null` values
-
-By default, `null` values are treated like empty strings:
-
-```javascript
-var withNull = qs.stringify({ a: null, b: '' });
-assert.equal(withNull, 'a=&b=');
-```
-
-Parsing does not distinguish between parameters with and without equal signs. Both are converted to empty strings.
-
-```javascript
-var equalsInsensitive = qs.parse('a&b=');
-assert.deepEqual(equalsInsensitive, { a: '', b: '' });
-```
-
-To distinguish between `null` values and empty strings, use the `strictNullHandling` flag. In the result string the `null`
-values have no `=` sign:
-
-```javascript
-var strictNull = qs.stringify({ a: null, b: '' }, { strictNullHandling: true });
-assert.equal(strictNull, 'a&b=');
-```
-
-To parse values without `=` back to `null`, use the `strictNullHandling` flag:
-
-```javascript
-var parsedStrictNull = qs.parse('a&b=', { strictNullHandling: true });
-assert.deepEqual(parsedStrictNull, { a: null, b: '' });
-```
-
-To completely skip rendering keys with `null` values, use the `skipNulls` flag:
-
-```javascript
-var nullsSkipped = qs.stringify({ a: 'b', c: null}, { skipNulls: true });
-assert.equal(nullsSkipped, 'a=b');
-```
-
-If you're communicating with legacy systems, you can switch to `iso-8859-1`
-using the `charset` option:
-
-```javascript
-var iso = qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' });
-assert.equal(iso, '%E6=%E6');
-```
-
-Characters that don't exist in `iso-8859-1` will be converted to numeric
-entities, similar to what browsers do:
-
-```javascript
-var numeric = qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' });
-assert.equal(numeric, 'a=%26%239786%3B');
-```
-
-You can use the `charsetSentinel` option to announce the character set by
-including a `utf8=✓` parameter with the proper encoding of the checkmark,
-similar to what Ruby on Rails and others do when submitting forms.
-
-```javascript
-var sentinel = qs.stringify({ a: '☺' }, { charsetSentinel: true });
-assert.equal(sentinel, 'utf8=%E2%9C%93&a=%E2%98%BA');
-
-var isoSentinel = qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' });
-assert.equal(isoSentinel, 'utf8=%26%2310003%3B&a=%E6');
-```
-
-### Dealing with special character sets
-
-By default, the encoding and decoding of characters is done in `utf-8`,
-and `iso-8859-1` support is also built in via the `charset` parameter.
-
-If you wish to encode query strings to a different character set (e.g.
-[Shift JIS](https://en.wikipedia.org/wiki/Shift_JIS)) you can use the
-[`qs-iconv`](https://github.com/martinheidegger/qs-iconv) library:
-
-```javascript
-var encoder = require('qs-iconv/encoder')('shift_jis');
-var shiftJISEncoded = qs.stringify({ a: 'こんにちは！' }, { encoder: encoder });
-assert.equal(shiftJISEncoded, 'a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I');
-```
-
-This also works for decoding of query strings:
-
-```javascript
-var decoder = require('qs-iconv/decoder')('shift_jis');
-var obj = qs.parse('a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I', { decoder: decoder });
-assert.deepEqual(obj, { a: 'こんにちは！'
}); -``` - -### RFC 3986 and RFC 1738 space encoding - -RFC3986 used as default option and encodes ' ' to *%20* which is backward compatible. -In the same time, output can be stringified as per RFC1738 with ' ' equal to '+'. - -``` -assert.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); -assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC3986' }), 'a=b%20c'); -assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC1738' }), 'a=b+c'); -``` - -## Security - -Please email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report. - -[1]: https://npmjs.org/package/qs -[2]: http://versionbadg.es/ljharb/qs.svg -[3]: https://api.travis-ci.org/ljharb/qs.svg -[4]: https://travis-ci.org/ljharb/qs -[5]: https://david-dm.org/ljharb/qs.svg -[6]: https://david-dm.org/ljharb/qs -[7]: https://david-dm.org/ljharb/qs/dev-status.svg -[8]: https://david-dm.org/ljharb/qs?type=dev -[9]: https://ci.testling.com/ljharb/qs.png -[10]: https://ci.testling.com/ljharb/qs -[11]: https://nodei.co/npm/qs.png?downloads=true&stars=true -[license-image]: http://img.shields.io/npm/l/qs.svg -[license-url]: LICENSE -[downloads-image]: http://img.shields.io/npm/dm/qs.svg -[downloads-url]: http://npm-stat.com/charts.html?package=qs diff --git a/node_modules/qs/dist/qs.js b/node_modules/qs/dist/qs.js deleted file mode 100644 index 602c3a6..0000000 --- a/node_modules/qs/dist/qs.js +++ /dev/null @@ -1,812 +0,0 @@ -(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Qs = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i -1) { - val = val.split(','); - } - - if (part.indexOf('[]=') > -1) { - val = isArray(val) ? [val] : val; - } - - if (has.call(obj, key)) { - obj[key] = utils.combine(obj[key], val); - } else { - obj[key] = val; - } - } - - return obj; -}; - -var parseObject = function (chain, val, options) { - var leaf = val; - - for (var i = chain.length - 1; i >= 0; --i) { - var obj; - var root = chain[i]; - - if (root === '[]' && options.parseArrays) { - obj = [].concat(leaf); - } else { - obj = options.plainObjects ? Object.create(null) : {}; - var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; - var index = parseInt(cleanRoot, 10); - if (!options.parseArrays && cleanRoot === '') { - obj = { 0: leaf }; - } else if ( - !isNaN(index) - && root !== cleanRoot - && String(index) === cleanRoot - && index >= 0 - && (options.parseArrays && index <= options.arrayLimit) - ) { - obj = []; - obj[index] = leaf; - } else { - obj[cleanRoot] = leaf; - } - } - - leaf = obj; - } - - return leaf; -}; - -var parseKeys = function parseQueryStringKeys(givenKey, val, options) { - if (!givenKey) { - return; - } - - // Transform dot notation to bracket notation - var key = options.allowDots ? 
givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; - - // The regex chunks - - var brackets = /(\[[^[\]]*])/; - var child = /(\[[^[\]]*])/g; - - // Get the parent - - var segment = options.depth > 0 && brackets.exec(key); - var parent = segment ? key.slice(0, segment.index) : key; - - // Stash the parent if it exists - - var keys = []; - if (parent) { - // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties - if (!options.plainObjects && has.call(Object.prototype, parent)) { - if (!options.allowPrototypes) { - return; - } - } - - keys.push(parent); - } - - // Loop through children appending to the array until we hit depth - - var i = 0; - while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { - i += 1; - if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { - if (!options.allowPrototypes) { - return; - } - } - keys.push(segment[1]); - } - - // If there's a remainder, just add whatever is left - - if (segment) { - keys.push('[' + key.slice(segment.index) + ']'); - } - - return parseObject(keys, val, options); -}; - -var normalizeParseOptions = function normalizeParseOptions(opts) { - if (!opts) { - return defaults; - } - - if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { - throw new TypeError('Decoder has to be a function.'); - } - - if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { - throw new Error('The charset option must be either utf-8, iso-8859-1, or undefined'); - } - var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; - - return { - allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, - allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, - arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, - charset: charset, - charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, - comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, - decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, - delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, - // eslint-disable-next-line no-implicit-coercion, no-extra-parens - depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, - ignoreQueryPrefix: opts.ignoreQueryPrefix === true, - interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, - parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, - parseArrays: opts.parseArrays !== false, - plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, - strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling - }; -}; - -module.exports = function (str, opts) { - var options = normalizeParseOptions(opts); - - if (str === '' || str === null || typeof str === 'undefined') { - return options.plainObjects ? Object.create(null) : {}; - } - - var tempObj = typeof str === 'string' ? parseValues(str, options) : str; - var obj = options.plainObjects ? 
Object.create(null) : {}; - - // Iterate over the keys and setup the new object - - var keys = Object.keys(tempObj); - for (var i = 0; i < keys.length; ++i) { - var key = keys[i]; - var newObj = parseKeys(key, tempObj[key], options); - obj = utils.merge(obj, newObj, options); - } - - return utils.compact(obj); -}; - -},{"./utils":5}],4:[function(require,module,exports){ -'use strict'; - -var utils = require('./utils'); -var formats = require('./formats'); -var has = Object.prototype.hasOwnProperty; - -var arrayPrefixGenerators = { - brackets: function brackets(prefix) { - return prefix + '[]'; - }, - comma: 'comma', - indices: function indices(prefix, key) { - return prefix + '[' + key + ']'; - }, - repeat: function repeat(prefix) { - return prefix; - } -}; - -var isArray = Array.isArray; -var push = Array.prototype.push; -var pushToArray = function (arr, valueOrArray) { - push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]); -}; - -var toISO = Date.prototype.toISOString; - -var defaultFormat = formats['default']; -var defaults = { - addQueryPrefix: false, - allowDots: false, - charset: 'utf-8', - charsetSentinel: false, - delimiter: '&', - encode: true, - encoder: utils.encode, - encodeValuesOnly: false, - format: defaultFormat, - formatter: formats.formatters[defaultFormat], - // deprecated - indices: false, - serializeDate: function serializeDate(date) { - return toISO.call(date); - }, - skipNulls: false, - strictNullHandling: false -}; - -var isNonNullishPrimitive = function isNonNullishPrimitive(v) { - return typeof v === 'string' - || typeof v === 'number' - || typeof v === 'boolean' - || typeof v === 'symbol' - || typeof v === 'bigint'; -}; - -var stringify = function stringify( - object, - prefix, - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly, - charset -) { - var obj = object; - if (typeof filter === 'function') { - obj = filter(prefix, obj); - } else if (obj instanceof Date) { - obj = serializeDate(obj); - } else if (generateArrayPrefix === 'comma' && isArray(obj)) { - obj = obj.join(','); - } - - if (obj === null) { - if (strictNullHandling) { - return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key') : prefix; - } - - obj = ''; - } - - if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { - if (encoder) { - var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key'); - return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value'))]; - } - return [formatter(prefix) + '=' + formatter(String(obj))]; - } - - var values = []; - - if (typeof obj === 'undefined') { - return values; - } - - var objKeys; - if (isArray(filter)) { - objKeys = filter; - } else { - var keys = Object.keys(obj); - objKeys = sort ? keys.sort(sort) : keys; - } - - for (var i = 0; i < objKeys.length; ++i) { - var key = objKeys[i]; - - if (skipNulls && obj[key] === null) { - continue; - } - - if (isArray(obj)) { - pushToArray(values, stringify( - obj[key], - typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix, - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly, - charset - )); - } else { - pushToArray(values, stringify( - obj[key], - prefix + (allowDots ? '.' 
+ key : '[' + key + ']'), - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly, - charset - )); - } - } - - return values; -}; - -var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { - if (!opts) { - return defaults; - } - - if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') { - throw new TypeError('Encoder has to be a function.'); - } - - var charset = opts.charset || defaults.charset; - if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { - throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); - } - - var format = formats['default']; - if (typeof opts.format !== 'undefined') { - if (!has.call(formats.formatters, opts.format)) { - throw new TypeError('Unknown format option provided.'); - } - format = opts.format; - } - var formatter = formats.formatters[format]; - - var filter = defaults.filter; - if (typeof opts.filter === 'function' || isArray(opts.filter)) { - filter = opts.filter; - } - - return { - addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, - allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, - charset: charset, - charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, - delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, - encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, - encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, - encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, - filter: filter, - formatter: formatter, - serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, - skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, - sort: typeof opts.sort === 'function' ? opts.sort : null, - strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling - }; -}; - -module.exports = function (object, opts) { - var obj = object; - var options = normalizeStringifyOptions(opts); - - var objKeys; - var filter; - - if (typeof options.filter === 'function') { - filter = options.filter; - obj = filter('', obj); - } else if (isArray(options.filter)) { - filter = options.filter; - objKeys = filter; - } - - var keys = []; - - if (typeof obj !== 'object' || obj === null) { - return ''; - } - - var arrayFormat; - if (opts && opts.arrayFormat in arrayPrefixGenerators) { - arrayFormat = opts.arrayFormat; - } else if (opts && 'indices' in opts) { - arrayFormat = opts.indices ? 'indices' : 'repeat'; - } else { - arrayFormat = 'indices'; - } - - var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; - - if (!objKeys) { - objKeys = Object.keys(obj); - } - - if (options.sort) { - objKeys.sort(options.sort); - } - - for (var i = 0; i < objKeys.length; ++i) { - var key = objKeys[i]; - - if (options.skipNulls && obj[key] === null) { - continue; - } - pushToArray(keys, stringify( - obj[key], - key, - generateArrayPrefix, - options.strictNullHandling, - options.skipNulls, - options.encode ? 
options.encoder : null, - options.filter, - options.sort, - options.allowDots, - options.serializeDate, - options.formatter, - options.encodeValuesOnly, - options.charset - )); - } - - var joined = keys.join(options.delimiter); - var prefix = options.addQueryPrefix === true ? '?' : ''; - - if (options.charsetSentinel) { - if (options.charset === 'iso-8859-1') { - // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark - prefix += 'utf8=%26%2310003%3B&'; - } else { - // encodeURIComponent('✓') - prefix += 'utf8=%E2%9C%93&'; - } - } - - return joined.length > 0 ? prefix + joined : ''; -}; - -},{"./formats":1,"./utils":5}],5:[function(require,module,exports){ -'use strict'; - -var has = Object.prototype.hasOwnProperty; -var isArray = Array.isArray; - -var hexTable = (function () { - var array = []; - for (var i = 0; i < 256; ++i) { - array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); - } - - return array; -}()); - -var compactQueue = function compactQueue(queue) { - while (queue.length > 1) { - var item = queue.pop(); - var obj = item.obj[item.prop]; - - if (isArray(obj)) { - var compacted = []; - - for (var j = 0; j < obj.length; ++j) { - if (typeof obj[j] !== 'undefined') { - compacted.push(obj[j]); - } - } - - item.obj[item.prop] = compacted; - } - } -}; - -var arrayToObject = function arrayToObject(source, options) { - var obj = options && options.plainObjects ? Object.create(null) : {}; - for (var i = 0; i < source.length; ++i) { - if (typeof source[i] !== 'undefined') { - obj[i] = source[i]; - } - } - - return obj; -}; - -var merge = function merge(target, source, options) { - /* eslint no-param-reassign: 0 */ - if (!source) { - return target; - } - - if (typeof source !== 'object') { - if (isArray(target)) { - target.push(source); - } else if (target && typeof target === 'object') { - if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { - target[source] = true; - } - } else { - return [target, source]; - } - - return target; - } - - if (!target || typeof target !== 'object') { - return [target].concat(source); - } - - var mergeTarget = target; - if (isArray(target) && !isArray(source)) { - mergeTarget = arrayToObject(target, options); - } - - if (isArray(target) && isArray(source)) { - source.forEach(function (item, i) { - if (has.call(target, i)) { - var targetItem = target[i]; - if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { - target[i] = merge(targetItem, item, options); - } else { - target.push(item); - } - } else { - target[i] = item; - } - }); - return target; - } - - return Object.keys(source).reduce(function (acc, key) { - var value = source[key]; - - if (has.call(acc, key)) { - acc[key] = merge(acc[key], value, options); - } else { - acc[key] = value; - } - return acc; - }, mergeTarget); -}; - -var assign = function assignSingleSource(target, source) { - return Object.keys(source).reduce(function (acc, key) { - acc[key] = source[key]; - return acc; - }, target); -}; - -var decode = function (str, decoder, charset) { - var strWithoutPlus = str.replace(/\+/g, ' '); - if (charset === 'iso-8859-1') { - // unescape never throws, no try...catch needed: - return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); - } - // utf-8 - try { - return decodeURIComponent(strWithoutPlus); - } catch (e) { - return strWithoutPlus; - } -}; - -var encode = function encode(str, defaultEncoder, charset) { - // This code was originally written by Brian White 
(mscdex) for the io.js core querystring library. - // It has been adapted here for stricter adherence to RFC 3986 - if (str.length === 0) { - return str; - } - - var string = str; - if (typeof str === 'symbol') { - string = Symbol.prototype.toString.call(str); - } else if (typeof str !== 'string') { - string = String(str); - } - - if (charset === 'iso-8859-1') { - return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { - return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; - }); - } - - var out = ''; - for (var i = 0; i < string.length; ++i) { - var c = string.charCodeAt(i); - - if ( - c === 0x2D // - - || c === 0x2E // . - || c === 0x5F // _ - || c === 0x7E // ~ - || (c >= 0x30 && c <= 0x39) // 0-9 - || (c >= 0x41 && c <= 0x5A) // a-z - || (c >= 0x61 && c <= 0x7A) // A-Z - ) { - out += string.charAt(i); - continue; - } - - if (c < 0x80) { - out = out + hexTable[c]; - continue; - } - - if (c < 0x800) { - out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); - continue; - } - - if (c < 0xD800 || c >= 0xE000) { - out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); - continue; - } - - i += 1; - c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); - out += hexTable[0xF0 | (c >> 18)] - + hexTable[0x80 | ((c >> 12) & 0x3F)] - + hexTable[0x80 | ((c >> 6) & 0x3F)] - + hexTable[0x80 | (c & 0x3F)]; - } - - return out; -}; - -var compact = function compact(value) { - var queue = [{ obj: { o: value }, prop: 'o' }]; - var refs = []; - - for (var i = 0; i < queue.length; ++i) { - var item = queue[i]; - var obj = item.obj[item.prop]; - - var keys = Object.keys(obj); - for (var j = 0; j < keys.length; ++j) { - var key = keys[j]; - var val = obj[key]; - if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { - queue.push({ obj: obj, prop: key }); - refs.push(val); - } - } - } - - compactQueue(queue); - - return value; -}; - -var isRegExp = function isRegExp(obj) { - return Object.prototype.toString.call(obj) === '[object RegExp]'; -}; - -var isBuffer = function isBuffer(obj) { - if (!obj || typeof obj !== 'object') { - return false; - } - - return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); -}; - -var combine = function combine(a, b) { - return [].concat(a, b); -}; - -module.exports = { - arrayToObject: arrayToObject, - assign: assign, - combine: combine, - compact: compact, - decode: decode, - encode: encode, - isBuffer: isBuffer, - isRegExp: isRegExp, - merge: merge -}; - -},{}]},{},[2])(2) -}); diff --git a/node_modules/qs/lib/formats.js b/node_modules/qs/lib/formats.js deleted file mode 100644 index a4ecca7..0000000 --- a/node_modules/qs/lib/formats.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict'; - -var replace = String.prototype.replace; -var percentTwenties = /%20/g; - -var util = require('./utils'); - -var Format = { - RFC1738: 'RFC1738', - RFC3986: 'RFC3986' -}; - -module.exports = util.assign( - { - 'default': Format.RFC3986, - formatters: { - RFC1738: function (value) { - return replace.call(value, percentTwenties, '+'); - }, - RFC3986: function (value) { - return String(value); - } - } - }, - Format -); diff --git a/node_modules/qs/lib/index.js b/node_modules/qs/lib/index.js deleted file mode 100644 index 0d6a97d..0000000 --- a/node_modules/qs/lib/index.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict'; - -var stringify = require('./stringify'); -var parse = require('./parse'); -var formats = require('./formats'); - -module.exports = { - 
formats: formats, - parse: parse, - stringify: stringify -}; diff --git a/node_modules/qs/lib/parse.js b/node_modules/qs/lib/parse.js deleted file mode 100644 index 7cca884..0000000 --- a/node_modules/qs/lib/parse.js +++ /dev/null @@ -1,248 +0,0 @@ -'use strict'; - -var utils = require('./utils'); - -var has = Object.prototype.hasOwnProperty; -var isArray = Array.isArray; - -var defaults = { - allowDots: false, - allowPrototypes: false, - arrayLimit: 20, - charset: 'utf-8', - charsetSentinel: false, - comma: false, - decoder: utils.decode, - delimiter: '&', - depth: 5, - ignoreQueryPrefix: false, - interpretNumericEntities: false, - parameterLimit: 1000, - parseArrays: true, - plainObjects: false, - strictNullHandling: false -}; - -var interpretNumericEntities = function (str) { - return str.replace(/&#(\d+);/g, function ($0, numberStr) { - return String.fromCharCode(parseInt(numberStr, 10)); - }); -}; - -// This is what browsers will submit when the ✓ character occurs in an -// application/x-www-form-urlencoded body and the encoding of the page containing -// the form is iso-8859-1, or when the submitted form has an accept-charset -// attribute of iso-8859-1. Presumably also with other charsets that do not contain -// the ✓ character, such as us-ascii. -var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') - -// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. -var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') - -var parseValues = function parseQueryStringValues(str, options) { - var obj = {}; - var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; - var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; - var parts = cleanStr.split(options.delimiter, limit); - var skipIndex = -1; // Keep track of where the utf8 sentinel was found - var i; - - var charset = options.charset; - if (options.charsetSentinel) { - for (i = 0; i < parts.length; ++i) { - if (parts[i].indexOf('utf8=') === 0) { - if (parts[i] === charsetSentinel) { - charset = 'utf-8'; - } else if (parts[i] === isoSentinel) { - charset = 'iso-8859-1'; - } - skipIndex = i; - i = parts.length; // The eslint settings do not allow break; - } - } - } - - for (i = 0; i < parts.length; ++i) { - if (i === skipIndex) { - continue; - } - var part = parts[i]; - - var bracketEqualsPos = part.indexOf(']='); - var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; - - var key, val; - if (pos === -1) { - key = options.decoder(part, defaults.decoder, charset, 'key'); - val = options.strictNullHandling ? null : ''; - } else { - key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); - val = options.decoder(part.slice(pos + 1), defaults.decoder, charset, 'value'); - } - - if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { - val = interpretNumericEntities(val); - } - - if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) { - val = val.split(','); - } - - if (part.indexOf('[]=') > -1) { - val = isArray(val) ? 
[val] : val; - } - - if (has.call(obj, key)) { - obj[key] = utils.combine(obj[key], val); - } else { - obj[key] = val; - } - } - - return obj; -}; - -var parseObject = function (chain, val, options) { - var leaf = val; - - for (var i = chain.length - 1; i >= 0; --i) { - var obj; - var root = chain[i]; - - if (root === '[]' && options.parseArrays) { - obj = [].concat(leaf); - } else { - obj = options.plainObjects ? Object.create(null) : {}; - var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; - var index = parseInt(cleanRoot, 10); - if (!options.parseArrays && cleanRoot === '') { - obj = { 0: leaf }; - } else if ( - !isNaN(index) - && root !== cleanRoot - && String(index) === cleanRoot - && index >= 0 - && (options.parseArrays && index <= options.arrayLimit) - ) { - obj = []; - obj[index] = leaf; - } else { - obj[cleanRoot] = leaf; - } - } - - leaf = obj; - } - - return leaf; -}; - -var parseKeys = function parseQueryStringKeys(givenKey, val, options) { - if (!givenKey) { - return; - } - - // Transform dot notation to bracket notation - var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; - - // The regex chunks - - var brackets = /(\[[^[\]]*])/; - var child = /(\[[^[\]]*])/g; - - // Get the parent - - var segment = options.depth > 0 && brackets.exec(key); - var parent = segment ? key.slice(0, segment.index) : key; - - // Stash the parent if it exists - - var keys = []; - if (parent) { - // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties - if (!options.plainObjects && has.call(Object.prototype, parent)) { - if (!options.allowPrototypes) { - return; - } - } - - keys.push(parent); - } - - // Loop through children appending to the array until we hit depth - - var i = 0; - while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { - i += 1; - if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { - if (!options.allowPrototypes) { - return; - } - } - keys.push(segment[1]); - } - - // If there's a remainder, just add whatever is left - - if (segment) { - keys.push('[' + key.slice(segment.index) + ']'); - } - - return parseObject(keys, val, options); -}; - -var normalizeParseOptions = function normalizeParseOptions(opts) { - if (!opts) { - return defaults; - } - - if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { - throw new TypeError('Decoder has to be a function.'); - } - - if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { - throw new Error('The charset option must be either utf-8, iso-8859-1, or undefined'); - } - var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; - - return { - allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, - allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, - arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, - charset: charset, - charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, - comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, - decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, - delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? 
opts.delimiter : defaults.delimiter, - // eslint-disable-next-line no-implicit-coercion, no-extra-parens - depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, - ignoreQueryPrefix: opts.ignoreQueryPrefix === true, - interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, - parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, - parseArrays: opts.parseArrays !== false, - plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, - strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling - }; -}; - -module.exports = function (str, opts) { - var options = normalizeParseOptions(opts); - - if (str === '' || str === null || typeof str === 'undefined') { - return options.plainObjects ? Object.create(null) : {}; - } - - var tempObj = typeof str === 'string' ? parseValues(str, options) : str; - var obj = options.plainObjects ? Object.create(null) : {}; - - // Iterate over the keys and setup the new object - - var keys = Object.keys(tempObj); - for (var i = 0; i < keys.length; ++i) { - var key = keys[i]; - var newObj = parseKeys(key, tempObj[key], options); - obj = utils.merge(obj, newObj, options); - } - - return utils.compact(obj); -}; diff --git a/node_modules/qs/lib/stringify.js b/node_modules/qs/lib/stringify.js deleted file mode 100644 index 72ded14..0000000 --- a/node_modules/qs/lib/stringify.js +++ /dev/null @@ -1,279 +0,0 @@ -'use strict'; - -var utils = require('./utils'); -var formats = require('./formats'); -var has = Object.prototype.hasOwnProperty; - -var arrayPrefixGenerators = { - brackets: function brackets(prefix) { - return prefix + '[]'; - }, - comma: 'comma', - indices: function indices(prefix, key) { - return prefix + '[' + key + ']'; - }, - repeat: function repeat(prefix) { - return prefix; - } -}; - -var isArray = Array.isArray; -var push = Array.prototype.push; -var pushToArray = function (arr, valueOrArray) { - push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]); -}; - -var toISO = Date.prototype.toISOString; - -var defaultFormat = formats['default']; -var defaults = { - addQueryPrefix: false, - allowDots: false, - charset: 'utf-8', - charsetSentinel: false, - delimiter: '&', - encode: true, - encoder: utils.encode, - encodeValuesOnly: false, - format: defaultFormat, - formatter: formats.formatters[defaultFormat], - // deprecated - indices: false, - serializeDate: function serializeDate(date) { - return toISO.call(date); - }, - skipNulls: false, - strictNullHandling: false -}; - -var isNonNullishPrimitive = function isNonNullishPrimitive(v) { - return typeof v === 'string' - || typeof v === 'number' - || typeof v === 'boolean' - || typeof v === 'symbol' - || typeof v === 'bigint'; -}; - -var stringify = function stringify( - object, - prefix, - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly, - charset -) { - var obj = object; - if (typeof filter === 'function') { - obj = filter(prefix, obj); - } else if (obj instanceof Date) { - obj = serializeDate(obj); - } else if (generateArrayPrefix === 'comma' && isArray(obj)) { - obj = obj.join(','); - } - - if (obj === null) { - if (strictNullHandling) { - return encoder && !encodeValuesOnly ? 
encoder(prefix, defaults.encoder, charset, 'key') : prefix; - } - - obj = ''; - } - - if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { - if (encoder) { - var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key'); - return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value'))]; - } - return [formatter(prefix) + '=' + formatter(String(obj))]; - } - - var values = []; - - if (typeof obj === 'undefined') { - return values; - } - - var objKeys; - if (isArray(filter)) { - objKeys = filter; - } else { - var keys = Object.keys(obj); - objKeys = sort ? keys.sort(sort) : keys; - } - - for (var i = 0; i < objKeys.length; ++i) { - var key = objKeys[i]; - - if (skipNulls && obj[key] === null) { - continue; - } - - if (isArray(obj)) { - pushToArray(values, stringify( - obj[key], - typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix, - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly, - charset - )); - } else { - pushToArray(values, stringify( - obj[key], - prefix + (allowDots ? '.' + key : '[' + key + ']'), - generateArrayPrefix, - strictNullHandling, - skipNulls, - encoder, - filter, - sort, - allowDots, - serializeDate, - formatter, - encodeValuesOnly, - charset - )); - } - } - - return values; -}; - -var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { - if (!opts) { - return defaults; - } - - if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') { - throw new TypeError('Encoder has to be a function.'); - } - - var charset = opts.charset || defaults.charset; - if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { - throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); - } - - var format = formats['default']; - if (typeof opts.format !== 'undefined') { - if (!has.call(formats.formatters, opts.format)) { - throw new TypeError('Unknown format option provided.'); - } - format = opts.format; - } - var formatter = formats.formatters[format]; - - var filter = defaults.filter; - if (typeof opts.filter === 'function' || isArray(opts.filter)) { - filter = opts.filter; - } - - return { - addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, - allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, - charset: charset, - charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, - delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, - encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, - encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, - encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, - filter: filter, - formatter: formatter, - serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, - skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, - sort: typeof opts.sort === 'function' ? opts.sort : null, - strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? 
opts.strictNullHandling : defaults.strictNullHandling - }; -}; - -module.exports = function (object, opts) { - var obj = object; - var options = normalizeStringifyOptions(opts); - - var objKeys; - var filter; - - if (typeof options.filter === 'function') { - filter = options.filter; - obj = filter('', obj); - } else if (isArray(options.filter)) { - filter = options.filter; - objKeys = filter; - } - - var keys = []; - - if (typeof obj !== 'object' || obj === null) { - return ''; - } - - var arrayFormat; - if (opts && opts.arrayFormat in arrayPrefixGenerators) { - arrayFormat = opts.arrayFormat; - } else if (opts && 'indices' in opts) { - arrayFormat = opts.indices ? 'indices' : 'repeat'; - } else { - arrayFormat = 'indices'; - } - - var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; - - if (!objKeys) { - objKeys = Object.keys(obj); - } - - if (options.sort) { - objKeys.sort(options.sort); - } - - for (var i = 0; i < objKeys.length; ++i) { - var key = objKeys[i]; - - if (options.skipNulls && obj[key] === null) { - continue; - } - pushToArray(keys, stringify( - obj[key], - key, - generateArrayPrefix, - options.strictNullHandling, - options.skipNulls, - options.encode ? options.encoder : null, - options.filter, - options.sort, - options.allowDots, - options.serializeDate, - options.formatter, - options.encodeValuesOnly, - options.charset - )); - } - - var joined = keys.join(options.delimiter); - var prefix = options.addQueryPrefix === true ? '?' : ''; - - if (options.charsetSentinel) { - if (options.charset === 'iso-8859-1') { - // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark - prefix += 'utf8=%26%2310003%3B&'; - } else { - // encodeURIComponent('✓') - prefix += 'utf8=%E2%9C%93&'; - } - } - - return joined.length > 0 ? prefix + joined : ''; -}; diff --git a/node_modules/qs/lib/utils.js b/node_modules/qs/lib/utils.js deleted file mode 100644 index bb97241..0000000 --- a/node_modules/qs/lib/utils.js +++ /dev/null @@ -1,236 +0,0 @@ -'use strict'; - -var has = Object.prototype.hasOwnProperty; -var isArray = Array.isArray; - -var hexTable = (function () { - var array = []; - for (var i = 0; i < 256; ++i) { - array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); - } - - return array; -}()); - -var compactQueue = function compactQueue(queue) { - while (queue.length > 1) { - var item = queue.pop(); - var obj = item.obj[item.prop]; - - if (isArray(obj)) { - var compacted = []; - - for (var j = 0; j < obj.length; ++j) { - if (typeof obj[j] !== 'undefined') { - compacted.push(obj[j]); - } - } - - item.obj[item.prop] = compacted; - } - } -}; - -var arrayToObject = function arrayToObject(source, options) { - var obj = options && options.plainObjects ? 
Object.create(null) : {}; - for (var i = 0; i < source.length; ++i) { - if (typeof source[i] !== 'undefined') { - obj[i] = source[i]; - } - } - - return obj; -}; - -var merge = function merge(target, source, options) { - /* eslint no-param-reassign: 0 */ - if (!source) { - return target; - } - - if (typeof source !== 'object') { - if (isArray(target)) { - target.push(source); - } else if (target && typeof target === 'object') { - if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { - target[source] = true; - } - } else { - return [target, source]; - } - - return target; - } - - if (!target || typeof target !== 'object') { - return [target].concat(source); - } - - var mergeTarget = target; - if (isArray(target) && !isArray(source)) { - mergeTarget = arrayToObject(target, options); - } - - if (isArray(target) && isArray(source)) { - source.forEach(function (item, i) { - if (has.call(target, i)) { - var targetItem = target[i]; - if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { - target[i] = merge(targetItem, item, options); - } else { - target.push(item); - } - } else { - target[i] = item; - } - }); - return target; - } - - return Object.keys(source).reduce(function (acc, key) { - var value = source[key]; - - if (has.call(acc, key)) { - acc[key] = merge(acc[key], value, options); - } else { - acc[key] = value; - } - return acc; - }, mergeTarget); -}; - -var assign = function assignSingleSource(target, source) { - return Object.keys(source).reduce(function (acc, key) { - acc[key] = source[key]; - return acc; - }, target); -}; - -var decode = function (str, decoder, charset) { - var strWithoutPlus = str.replace(/\+/g, ' '); - if (charset === 'iso-8859-1') { - // unescape never throws, no try...catch needed: - return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); - } - // utf-8 - try { - return decodeURIComponent(strWithoutPlus); - } catch (e) { - return strWithoutPlus; - } -}; - -var encode = function encode(str, defaultEncoder, charset) { - // This code was originally written by Brian White (mscdex) for the io.js core querystring library. - // It has been adapted here for stricter adherence to RFC 3986 - if (str.length === 0) { - return str; - } - - var string = str; - if (typeof str === 'symbol') { - string = Symbol.prototype.toString.call(str); - } else if (typeof str !== 'string') { - string = String(str); - } - - if (charset === 'iso-8859-1') { - return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { - return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; - }); - } - - var out = ''; - for (var i = 0; i < string.length; ++i) { - var c = string.charCodeAt(i); - - if ( - c === 0x2D // - - || c === 0x2E // . 
- || c === 0x5F // _ - || c === 0x7E // ~ - || (c >= 0x30 && c <= 0x39) // 0-9 - || (c >= 0x41 && c <= 0x5A) // a-z - || (c >= 0x61 && c <= 0x7A) // A-Z - ) { - out += string.charAt(i); - continue; - } - - if (c < 0x80) { - out = out + hexTable[c]; - continue; - } - - if (c < 0x800) { - out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); - continue; - } - - if (c < 0xD800 || c >= 0xE000) { - out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); - continue; - } - - i += 1; - c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); - out += hexTable[0xF0 | (c >> 18)] - + hexTable[0x80 | ((c >> 12) & 0x3F)] - + hexTable[0x80 | ((c >> 6) & 0x3F)] - + hexTable[0x80 | (c & 0x3F)]; - } - - return out; -}; - -var compact = function compact(value) { - var queue = [{ obj: { o: value }, prop: 'o' }]; - var refs = []; - - for (var i = 0; i < queue.length; ++i) { - var item = queue[i]; - var obj = item.obj[item.prop]; - - var keys = Object.keys(obj); - for (var j = 0; j < keys.length; ++j) { - var key = keys[j]; - var val = obj[key]; - if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { - queue.push({ obj: obj, prop: key }); - refs.push(val); - } - } - } - - compactQueue(queue); - - return value; -}; - -var isRegExp = function isRegExp(obj) { - return Object.prototype.toString.call(obj) === '[object RegExp]'; -}; - -var isBuffer = function isBuffer(obj) { - if (!obj || typeof obj !== 'object') { - return false; - } - - return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); -}; - -var combine = function combine(a, b) { - return [].concat(a, b); -}; - -module.exports = { - arrayToObject: arrayToObject, - assign: assign, - combine: combine, - compact: compact, - decode: decode, - encode: encode, - isBuffer: isBuffer, - isRegExp: isRegExp, - merge: merge -}; diff --git a/node_modules/qs/package.json b/node_modules/qs/package.json deleted file mode 100644 index f0941f1..0000000 --- a/node_modules/qs/package.json +++ /dev/null @@ -1,91 +0,0 @@ -{ - "_from": "qs@^6.9.1", - "_id": "qs@6.9.1", - "_inBundle": false, - "_integrity": "sha512-Cxm7/SS/y/Z3MHWSxXb8lIFqgqBowP5JMlTUFyJN88y0SGQhVmZnqFK/PeuMX9LzUyWsqqhNxIyg0jlzq946yA==", - "_location": "/qs", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "qs@^6.9.1", - "name": "qs", - "escapedName": "qs", - "rawSpec": "^6.9.1", - "saveSpec": null, - "fetchSpec": "^6.9.1" - }, - "_requiredBy": [ - "/async-neocities" - ], - "_resolved": "https://registry.npmjs.org/qs/-/qs-6.9.1.tgz", - "_shasum": "20082c65cb78223635ab1a9eaca8875a29bf8ec9", - "_spec": "qs@^6.9.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "bugs": { - "url": "https://github.com/ljharb/qs/issues" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Jordan Harband", - "email": "ljharb@gmail.com", - "url": "http://ljharb.codes" - } - ], - "dependencies": {}, - "deprecated": false, - "description": "A querystring parser that supports nesting and arrays, with a depth limit", - "devDependencies": { - "@ljharb/eslint-config": "^15.0.0", - "browserify": "^16.5.0", - "covert": "^1.1.1", - "eclint": "^2.8.1", - "eslint": "^6.6.0", - "evalmd": "^0.0.19", - "for-each": "^0.3.3", - "has-symbols": "^1.0.0", - "iconv-lite": "^0.4.24", - "mkdirp": "^0.5.1", - "object-inspect": "^1.6.0", - "qs-iconv": "^1.0.4", - "safe-publish-latest": "^1.1.3", - "safer-buffer": "^2.1.2", - 
"tape": "^4.11.0" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - }, - "homepage": "https://github.com/ljharb/qs", - "keywords": [ - "querystring", - "qs", - "query", - "url", - "parse", - "stringify" - ], - "license": "BSD-3-Clause", - "main": "lib/index.js", - "name": "qs", - "repository": { - "type": "git", - "url": "git+https://github.com/ljharb/qs.git" - }, - "scripts": { - "coverage": "covert test", - "dist": "mkdirp dist && browserify --standalone Qs lib/index.js > dist/qs.js", - "lint": "eslint lib/*.js test/*.js", - "postlint": "eclint check * lib/* test/*", - "posttest": "npx aud", - "prepublish": "safe-publish-latest && npm run dist", - "pretest": "npm run --silent readme && npm run --silent lint", - "readme": "evalmd README.md", - "test": "npm run --silent coverage", - "tests-only": "node test" - }, - "version": "6.9.1" -} diff --git a/node_modules/qs/test/.eslintrc b/node_modules/qs/test/.eslintrc deleted file mode 100644 index 8ab8536..0000000 --- a/node_modules/qs/test/.eslintrc +++ /dev/null @@ -1,18 +0,0 @@ -{ - "rules": { - "array-bracket-newline": 0, - "array-element-newline": 0, - "consistent-return": 2, - "function-paren-newline": 0, - "max-lines": 0, - "max-lines-per-function": 0, - "max-nested-callbacks": [2, 3], - "max-statements": 0, - "no-buffer-constructor": 0, - "no-extend-native": 0, - "no-magic-numbers": 0, - "no-throw-literal": 0, - "object-curly-newline": 0, - "sort-keys": 0, - } -} diff --git a/node_modules/qs/test/index.js b/node_modules/qs/test/index.js deleted file mode 100644 index 5e6bc8f..0000000 --- a/node_modules/qs/test/index.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; - -require('./parse'); - -require('./stringify'); - -require('./utils'); diff --git a/node_modules/qs/test/parse.js b/node_modules/qs/test/parse.js deleted file mode 100644 index be10400..0000000 --- a/node_modules/qs/test/parse.js +++ /dev/null @@ -1,758 +0,0 @@ -'use strict'; - -var test = require('tape'); -var qs = require('../'); -var utils = require('../lib/utils'); -var iconv = require('iconv-lite'); -var SaferBuffer = require('safer-buffer').Buffer; - -test('parse()', function (t) { - t.test('parses a simple string', function (st) { - st.deepEqual(qs.parse('0=foo'), { 0: 'foo' }); - st.deepEqual(qs.parse('foo=c++'), { foo: 'c ' }); - st.deepEqual(qs.parse('a[>=]=23'), { a: { '>=': '23' } }); - st.deepEqual(qs.parse('a[<=>]==23'), { a: { '<=>': '=23' } }); - st.deepEqual(qs.parse('a[==]=23'), { a: { '==': '23' } }); - st.deepEqual(qs.parse('foo', { strictNullHandling: true }), { foo: null }); - st.deepEqual(qs.parse('foo'), { foo: '' }); - st.deepEqual(qs.parse('foo='), { foo: '' }); - st.deepEqual(qs.parse('foo=bar'), { foo: 'bar' }); - st.deepEqual(qs.parse(' foo = bar = baz '), { ' foo ': ' bar = baz ' }); - st.deepEqual(qs.parse('foo=bar=baz'), { foo: 'bar=baz' }); - st.deepEqual(qs.parse('foo=bar&bar=baz'), { foo: 'bar', bar: 'baz' }); - st.deepEqual(qs.parse('foo2=bar2&baz2='), { foo2: 'bar2', baz2: '' }); - st.deepEqual(qs.parse('foo=bar&baz', { strictNullHandling: true }), { foo: 'bar', baz: null }); - st.deepEqual(qs.parse('foo=bar&baz'), { foo: 'bar', baz: '' }); - st.deepEqual(qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World'), { - cht: 'p3', - chd: 't:60,40', - chs: '250x100', - chl: 'Hello|World' - }); - st.end(); - }); - - t.test('arrayFormat: brackets allows only explicit arrays', function (st) { - st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); - 
st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'brackets' }), { a: 'b,c' }); - st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); - st.end(); - }); - - t.test('arrayFormat: indices allows only indexed arrays', function (st) { - st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'indices' }), { a: 'b,c' }); - st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); - st.end(); - }); - - t.test('arrayFormat: comma allows only comma-separated arrays', function (st) { - st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'comma' }), { a: 'b,c' }); - st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); - st.end(); - }); - - t.test('arrayFormat: repeat allows only repeated values', function (st) { - st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'repeat' }), { a: 'b,c' }); - st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); - st.end(); - }); - - t.test('allows enabling dot notation', function (st) { - st.deepEqual(qs.parse('a.b=c'), { 'a.b': 'c' }); - st.deepEqual(qs.parse('a.b=c', { allowDots: true }), { a: { b: 'c' } }); - st.end(); - }); - - t.deepEqual(qs.parse('a[b]=c'), { a: { b: 'c' } }, 'parses a single nested string'); - t.deepEqual(qs.parse('a[b][c]=d'), { a: { b: { c: 'd' } } }, 'parses a double nested string'); - t.deepEqual( - qs.parse('a[b][c][d][e][f][g][h]=i'), - { a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } }, - 'defaults to a depth of 5' - ); - - t.test('only parses one level when depth = 1', function (st) { - st.deepEqual(qs.parse('a[b][c]=d', { depth: 1 }), { a: { b: { '[c]': 'd' } } }); - st.deepEqual(qs.parse('a[b][c][d]=e', { depth: 1 }), { a: { b: { '[c][d]': 'e' } } }); - st.end(); - }); - - t.test('uses original key when depth = 0', function (st) { - st.deepEqual(qs.parse('a[0]=b&a[1]=c', { depth: 0 }), { 'a[0]': 'b', 'a[1]': 'c' }); - st.deepEqual(qs.parse('a[0][0]=b&a[0][1]=c&a[1]=d&e=2', { depth: 0 }), { 'a[0][0]': 'b', 'a[0][1]': 'c', 'a[1]': 'd', e: '2' }); - st.end(); - }); - - t.test('uses original key when depth = false', function (st) { - st.deepEqual(qs.parse('a[0]=b&a[1]=c', { depth: false }), { 'a[0]': 'b', 'a[1]': 'c' }); - st.deepEqual(qs.parse('a[0][0]=b&a[0][1]=c&a[1]=d&e=2', { depth: false }), { 'a[0][0]': 'b', 'a[0][1]': 'c', 'a[1]': 'd', e: '2' }); - st.end(); - }); - - t.deepEqual(qs.parse('a=b&a=c'), { a: ['b', 'c'] }, 'parses a simple array'); - - t.test('parses an explicit array', function (st) { - st.deepEqual(qs.parse('a[]=b'), { a: ['b'] }); - st.deepEqual(qs.parse('a[]=b&a[]=c'), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[]=b&a[]=c&a[]=d'), { a: ['b', 'c', 'd'] }); - st.end(); - }); - - t.test('parses a mix of simple and explicit arrays', function (st) { - st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[0]=b&a=c'), { a: ['b', 
'c'] }); - st.deepEqual(qs.parse('a=b&a[0]=c'), { a: ['b', 'c'] }); - - st.deepEqual(qs.parse('a[1]=b&a=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[]=b&a=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); - - st.deepEqual(qs.parse('a=b&a[1]=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a=b&a[]=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); - - st.end(); - }); - - t.test('parses a nested array', function (st) { - st.deepEqual(qs.parse('a[b][]=c&a[b][]=d'), { a: { b: ['c', 'd'] } }); - st.deepEqual(qs.parse('a[>=]=25'), { a: { '>=': '25' } }); - st.end(); - }); - - t.test('allows to specify array indices', function (st) { - st.deepEqual(qs.parse('a[1]=c&a[0]=b&a[2]=d'), { a: ['b', 'c', 'd'] }); - st.deepEqual(qs.parse('a[1]=c&a[0]=b'), { a: ['b', 'c'] }); - st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 20 }), { a: ['c'] }); - st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 0 }), { a: { 1: 'c' } }); - st.deepEqual(qs.parse('a[1]=c'), { a: ['c'] }); - st.end(); - }); - - t.test('limits specific array indices to arrayLimit', function (st) { - st.deepEqual(qs.parse('a[20]=a', { arrayLimit: 20 }), { a: ['a'] }); - st.deepEqual(qs.parse('a[21]=a', { arrayLimit: 20 }), { a: { 21: 'a' } }); - st.end(); - }); - - t.deepEqual(qs.parse('a[12b]=c'), { a: { '12b': 'c' } }, 'supports keys that begin with a number'); - - t.test('supports encoded = signs', function (st) { - st.deepEqual(qs.parse('he%3Dllo=th%3Dere'), { 'he=llo': 'th=ere' }); - st.end(); - }); - - t.test('is ok with url encoded strings', function (st) { - st.deepEqual(qs.parse('a[b%20c]=d'), { a: { 'b c': 'd' } }); - st.deepEqual(qs.parse('a[b]=c%20d'), { a: { b: 'c d' } }); - st.end(); - }); - - t.test('allows brackets in the value', function (st) { - st.deepEqual(qs.parse('pets=["tobi"]'), { pets: '["tobi"]' }); - st.deepEqual(qs.parse('operators=[">=", "<="]'), { operators: '[">=", "<="]' }); - st.end(); - }); - - t.test('allows empty values', function (st) { - st.deepEqual(qs.parse(''), {}); - st.deepEqual(qs.parse(null), {}); - st.deepEqual(qs.parse(undefined), {}); - st.end(); - }); - - t.test('transforms arrays to objects', function (st) { - st.deepEqual(qs.parse('foo[0]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); - st.deepEqual(qs.parse('foo[bad]=baz&foo[0]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); - st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); - st.deepEqual(qs.parse('foo[]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); - st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo'), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); - st.deepEqual(qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb'), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); - - st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: false }), { a: { 0: 'b', t: 'u' } }); - st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: true }), { a: { 0: 'b', t: 'u', hasOwnProperty: 'c' } }); - st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: false }), { a: { 0: 'b', x: 'y' } }); - st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: true }), { a: { 0: 'b', hasOwnProperty: 'c', x: 'y' } }); - st.end(); - }); - - t.test('transforms arrays to objects (dot notation)', function (st) { - st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad=baz', { allowDots: true 
}), { foo: [{ baz: 'bar' }], fool: { bad: 'baz' } }); - st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad.boo=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } }); - st.deepEqual(qs.parse('foo[0][0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } }); - st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15'], bar: '2' }] }); - st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15', '16'], bar: '2' }] }); - st.deepEqual(qs.parse('foo.bad=baz&foo[0]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); - st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); - st.deepEqual(qs.parse('foo[]=bar&foo.bad=baz', { allowDots: true }), { foo: { 0: 'bar', bad: 'baz' } }); - st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); - st.deepEqual(qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb', { allowDots: true }), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); - st.end(); - }); - - t.test('correctly prunes undefined values when converting an array to an object', function (st) { - st.deepEqual(qs.parse('a[2]=b&a[99999999]=c'), { a: { 2: 'b', 99999999: 'c' } }); - st.end(); - }); - - t.test('supports malformed uri characters', function (st) { - st.deepEqual(qs.parse('{%:%}', { strictNullHandling: true }), { '{%:%}': null }); - st.deepEqual(qs.parse('{%:%}='), { '{%:%}': '' }); - st.deepEqual(qs.parse('foo=%:%}'), { foo: '%:%}' }); - st.end(); - }); - - t.test('doesn\'t produce empty keys', function (st) { - st.deepEqual(qs.parse('_r=1&'), { _r: '1' }); - st.end(); - }); - - t.test('cannot access Object prototype', function (st) { - qs.parse('constructor[prototype][bad]=bad'); - qs.parse('bad[constructor][prototype][bad]=bad'); - st.equal(typeof Object.prototype.bad, 'undefined'); - st.end(); - }); - - t.test('parses arrays of objects', function (st) { - st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); - st.deepEqual(qs.parse('a[0][b]=c'), { a: [{ b: 'c' }] }); - st.end(); - }); - - t.test('allows for empty strings in arrays', function (st) { - st.deepEqual(qs.parse('a[]=b&a[]=&a[]=c'), { a: ['b', '', 'c'] }); - - st.deepEqual( - qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true, arrayLimit: 20 }), - { a: ['b', null, 'c', ''] }, - 'with arrayLimit 20 + array indices: null then empty string works' - ); - st.deepEqual( - qs.parse('a[]=b&a[]&a[]=c&a[]=', { strictNullHandling: true, arrayLimit: 0 }), - { a: ['b', null, 'c', ''] }, - 'with arrayLimit 0 + array brackets: null then empty string works' - ); - - st.deepEqual( - qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true, arrayLimit: 20 }), - { a: ['b', '', 'c', null] }, - 'with arrayLimit 20 + array indices: empty string then null works' - ); - st.deepEqual( - qs.parse('a[]=b&a[]=&a[]=c&a[]', { strictNullHandling: true, arrayLimit: 0 }), - { a: ['b', '', 'c', null] }, - 'with arrayLimit 0 + array brackets: empty string then null works' - ); - - st.deepEqual( - qs.parse('a[]=&a[]=b&a[]=c'), - { a: ['', 'b', 'c'] }, - 'array brackets: empty strings work' - ); - st.end(); - }); - - t.test('compacts sparse arrays', function (st) { - st.deepEqual(qs.parse('a[10]=1&a[2]=2', { arrayLimit: 20 }), { a: ['2', '1'] }); - st.deepEqual(qs.parse('a[1][b][2][c]=1', { arrayLimit: 20 }), { a: [{ b: [{ c: '1' }] }] }); - 
st.deepEqual(qs.parse('a[1][2][3][c]=1', { arrayLimit: 20 }), { a: [[[{ c: '1' }]]] }); - st.deepEqual(qs.parse('a[1][2][3][c][1]=1', { arrayLimit: 20 }), { a: [[[{ c: ['1'] }]]] }); - st.end(); - }); - - t.test('parses semi-parsed strings', function (st) { - st.deepEqual(qs.parse({ 'a[b]': 'c' }), { a: { b: 'c' } }); - st.deepEqual(qs.parse({ 'a[b]': 'c', 'a[d]': 'e' }), { a: { b: 'c', d: 'e' } }); - st.end(); - }); - - t.test('parses buffers correctly', function (st) { - var b = SaferBuffer.from('test'); - st.deepEqual(qs.parse({ a: b }), { a: b }); - st.end(); - }); - - t.test('parses jquery-param strings', function (st) { - // readable = 'filter[0][]=int1&filter[0][]==&filter[0][]=77&filter[]=and&filter[2][]=int2&filter[2][]==&filter[2][]=8' - var encoded = 'filter%5B0%5D%5B%5D=int1&filter%5B0%5D%5B%5D=%3D&filter%5B0%5D%5B%5D=77&filter%5B%5D=and&filter%5B2%5D%5B%5D=int2&filter%5B2%5D%5B%5D=%3D&filter%5B2%5D%5B%5D=8'; - var expected = { filter: [['int1', '=', '77'], 'and', ['int2', '=', '8']] }; - st.deepEqual(qs.parse(encoded), expected); - st.end(); - }); - - t.test('continues parsing when no parent is found', function (st) { - st.deepEqual(qs.parse('[]=&a=b'), { 0: '', a: 'b' }); - st.deepEqual(qs.parse('[]&a=b', { strictNullHandling: true }), { 0: null, a: 'b' }); - st.deepEqual(qs.parse('[foo]=bar'), { foo: 'bar' }); - st.end(); - }); - - t.test('does not error when parsing a very long array', function (st) { - var str = 'a[]=a'; - while (Buffer.byteLength(str) < 128 * 1024) { - str = str + '&' + str; - } - - st.doesNotThrow(function () { - qs.parse(str); - }); - - st.end(); - }); - - t.test('should not throw when a native prototype has an enumerable property', function (st) { - Object.prototype.crash = ''; - Array.prototype.crash = ''; - st.doesNotThrow(qs.parse.bind(null, 'a=b')); - st.deepEqual(qs.parse('a=b'), { a: 'b' }); - st.doesNotThrow(qs.parse.bind(null, 'a[][b]=c')); - st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); - delete Object.prototype.crash; - delete Array.prototype.crash; - st.end(); - }); - - t.test('parses a string with an alternative string delimiter', function (st) { - st.deepEqual(qs.parse('a=b;c=d', { delimiter: ';' }), { a: 'b', c: 'd' }); - st.end(); - }); - - t.test('parses a string with an alternative RegExp delimiter', function (st) { - st.deepEqual(qs.parse('a=b; c=d', { delimiter: /[;,] */ }), { a: 'b', c: 'd' }); - st.end(); - }); - - t.test('does not use non-splittable objects as delimiters', function (st) { - st.deepEqual(qs.parse('a=b&c=d', { delimiter: true }), { a: 'b', c: 'd' }); - st.end(); - }); - - t.test('allows overriding parameter limit', function (st) { - st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: 1 }), { a: 'b' }); - st.end(); - }); - - t.test('allows setting the parameter limit to Infinity', function (st) { - st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: Infinity }), { a: 'b', c: 'd' }); - st.end(); - }); - - t.test('allows overriding array limit', function (st) { - st.deepEqual(qs.parse('a[0]=b', { arrayLimit: -1 }), { a: { 0: 'b' } }); - st.deepEqual(qs.parse('a[-1]=b', { arrayLimit: -1 }), { a: { '-1': 'b' } }); - st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 }), { a: { 0: 'b', 1: 'c' } }); - st.end(); - }); - - t.test('allows disabling array parsing', function (st) { - var indices = qs.parse('a[0]=b&a[1]=c', { parseArrays: false }); - st.deepEqual(indices, { a: { 0: 'b', 1: 'c' } }); - st.equal(Array.isArray(indices.a), false, 'parseArrays:false, indices case is not an array'); - - var emptyBrackets 
= qs.parse('a[]=b', { parseArrays: false }); - st.deepEqual(emptyBrackets, { a: { 0: 'b' } }); - st.equal(Array.isArray(emptyBrackets.a), false, 'parseArrays:false, empty brackets case is not an array'); - - st.end(); - }); - - t.test('allows for query string prefix', function (st) { - st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); - st.deepEqual(qs.parse('foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); - st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: false }), { '?foo': 'bar' }); - st.end(); - }); - - t.test('parses an object', function (st) { - var input = { - 'user[name]': { 'pop[bob]': 3 }, - 'user[email]': null - }; - - var expected = { - user: { - name: { 'pop[bob]': 3 }, - email: null - } - }; - - var result = qs.parse(input); - - st.deepEqual(result, expected); - st.end(); - }); - - t.test('parses string with comma as array divider', function (st) { - st.deepEqual(qs.parse('foo=bar,tee', { comma: true }), { foo: ['bar', 'tee'] }); - st.deepEqual(qs.parse('foo[bar]=coffee,tee', { comma: true }), { foo: { bar: ['coffee', 'tee'] } }); - st.deepEqual(qs.parse('foo=', { comma: true }), { foo: '' }); - st.deepEqual(qs.parse('foo', { comma: true }), { foo: '' }); - st.deepEqual(qs.parse('foo', { comma: true, strictNullHandling: true }), { foo: null }); - st.end(); - }); - - t.test('use number decoder, parses string that has one number with comma option enabled', function (st) { - var decoder = function (str, defaultDecoder, charset, type) { - if (!isNaN(Number(str))) { - return parseFloat(str); - } - return defaultDecoder(str, defaultDecoder, charset, type); - }; - - st.deepEqual(qs.parse('foo=1', { comma: true, decoder: decoder }), { foo: 1 }); - st.deepEqual(qs.parse('foo=0', { comma: true, decoder: decoder }), { foo: 0 }); - - st.end(); - }); - - t.test('parses brackets holds array of arrays when having two parts of strings with comma as array divider', function (st) { - st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=4,5,6', { comma: true }), { foo: [['1', '2', '3'], ['4', '5', '6']] }); - st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=', { comma: true }), { foo: [['1', '2', '3'], ''] }); - st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=,', { comma: true }), { foo: [['1', '2', '3'], ['', '']] }); - st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=a', { comma: true }), { foo: [['1', '2', '3'], 'a'] }); - - st.end(); - }); - - t.test('parses an object in dot notation', function (st) { - var input = { - 'user.name': { 'pop[bob]': 3 }, - 'user.email.': null - }; - - var expected = { - user: { - name: { 'pop[bob]': 3 }, - email: null - } - }; - - var result = qs.parse(input, { allowDots: true }); - - st.deepEqual(result, expected); - st.end(); - }); - - t.test('parses an object and not child values', function (st) { - var input = { - 'user[name]': { 'pop[bob]': { test: 3 } }, - 'user[email]': null - }; - - var expected = { - user: { - name: { 'pop[bob]': { test: 3 } }, - email: null - } - }; - - var result = qs.parse(input); - - st.deepEqual(result, expected); - st.end(); - }); - - t.test('does not blow up when Buffer global is missing', function (st) { - var tempBuffer = global.Buffer; - delete global.Buffer; - var result = qs.parse('a=b&c=d'); - global.Buffer = tempBuffer; - st.deepEqual(result, { a: 'b', c: 'd' }); - st.end(); - }); - - t.test('does not crash when parsing circular references', function (st) { - var a = {}; - a.b = a; - - var parsed; - - st.doesNotThrow(function () { - parsed = qs.parse({ 'foo[bar]': 'baz', 'foo[baz]': a }); - }); - - 
st.equal('foo' in parsed, true, 'parsed has "foo" property'); - st.equal('bar' in parsed.foo, true); - st.equal('baz' in parsed.foo, true); - st.equal(parsed.foo.bar, 'baz'); - st.deepEqual(parsed.foo.baz, a); - st.end(); - }); - - t.test('does not crash when parsing deep objects', function (st) { - var parsed; - var str = 'foo'; - - for (var i = 0; i < 5000; i++) { - str += '[p]'; - } - - str += '=bar'; - - st.doesNotThrow(function () { - parsed = qs.parse(str, { depth: 5000 }); - }); - - st.equal('foo' in parsed, true, 'parsed has "foo" property'); - - var depth = 0; - var ref = parsed.foo; - while ((ref = ref.p)) { - depth += 1; - } - - st.equal(depth, 5000, 'parsed is 5000 properties deep'); - - st.end(); - }); - - t.test('parses null objects correctly', { skip: !Object.create }, function (st) { - var a = Object.create(null); - a.b = 'c'; - - st.deepEqual(qs.parse(a), { b: 'c' }); - var result = qs.parse({ a: a }); - st.equal('a' in result, true, 'result has "a" property'); - st.deepEqual(result.a, a); - st.end(); - }); - - t.test('parses dates correctly', function (st) { - var now = new Date(); - st.deepEqual(qs.parse({ a: now }), { a: now }); - st.end(); - }); - - t.test('parses regular expressions correctly', function (st) { - var re = /^test$/; - st.deepEqual(qs.parse({ a: re }), { a: re }); - st.end(); - }); - - t.test('does not allow overwriting prototype properties', function (st) { - st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: false }), {}); - st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: false }), {}); - - st.deepEqual( - qs.parse('toString', { allowPrototypes: false }), - {}, - 'bare "toString" results in {}' - ); - - st.end(); - }); - - t.test('can allow overwriting prototype properties', function (st) { - st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }), { a: { hasOwnProperty: 'b' } }); - st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: true }), { hasOwnProperty: 'b' }); - - st.deepEqual( - qs.parse('toString', { allowPrototypes: true }), - { toString: '' }, - 'bare "toString" results in { toString: "" }' - ); - - st.end(); - }); - - t.test('params starting with a closing bracket', function (st) { - st.deepEqual(qs.parse(']=toString'), { ']': 'toString' }); - st.deepEqual(qs.parse(']]=toString'), { ']]': 'toString' }); - st.deepEqual(qs.parse(']hello]=toString'), { ']hello]': 'toString' }); - st.end(); - }); - - t.test('params starting with a starting bracket', function (st) { - st.deepEqual(qs.parse('[=toString'), { '[': 'toString' }); - st.deepEqual(qs.parse('[[=toString'), { '[[': 'toString' }); - st.deepEqual(qs.parse('[hello[=toString'), { '[hello[': 'toString' }); - st.end(); - }); - - t.test('add keys to objects', function (st) { - st.deepEqual( - qs.parse('a[b]=c&a=d'), - { a: { b: 'c', d: true } }, - 'can add keys to objects' - ); - - st.deepEqual( - qs.parse('a[b]=c&a=toString'), - { a: { b: 'c' } }, - 'can not overwrite prototype' - ); - - st.deepEqual( - qs.parse('a[b]=c&a=toString', { allowPrototypes: true }), - { a: { b: 'c', toString: true } }, - 'can overwrite prototype with allowPrototypes true' - ); - - st.deepEqual( - qs.parse('a[b]=c&a=toString', { plainObjects: true }), - { a: { b: 'c', toString: true } }, - 'can overwrite prototype with plainObjects true' - ); - - st.end(); - }); - - t.test('can return null objects', { skip: !Object.create }, function (st) { - var expected = Object.create(null); - expected.a = Object.create(null); - expected.a.b = 'c'; - expected.a.hasOwnProperty = 
'd'; - st.deepEqual(qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true }), expected); - st.deepEqual(qs.parse(null, { plainObjects: true }), Object.create(null)); - var expectedArray = Object.create(null); - expectedArray.a = Object.create(null); - expectedArray.a[0] = 'b'; - expectedArray.a.c = 'd'; - st.deepEqual(qs.parse('a[]=b&a[c]=d', { plainObjects: true }), expectedArray); - st.end(); - }); - - t.test('can parse with custom encoding', function (st) { - st.deepEqual(qs.parse('%8c%a7=%91%e5%8d%e3%95%7b', { - decoder: function (str) { - var reg = /%([0-9A-F]{2})/ig; - var result = []; - var parts = reg.exec(str); - while (parts) { - result.push(parseInt(parts[1], 16)); - parts = reg.exec(str); - } - return String(iconv.decode(SaferBuffer.from(result), 'shift_jis')); - } - }), { 県: '大阪府' }); - st.end(); - }); - - t.test('receives the default decoder as a second argument', function (st) { - st.plan(1); - qs.parse('a', { - decoder: function (str, defaultDecoder) { - st.equal(defaultDecoder, utils.decode); - } - }); - st.end(); - }); - - t.test('throws error with wrong decoder', function (st) { - st['throws'](function () { - qs.parse({}, { decoder: 'string' }); - }, new TypeError('Decoder has to be a function.')); - st.end(); - }); - - t.test('does not mutate the options argument', function (st) { - var options = {}; - qs.parse('a[b]=true', options); - st.deepEqual(options, {}); - st.end(); - }); - - t.test('throws if an invalid charset is specified', function (st) { - st['throws'](function () { - qs.parse('a=b', { charset: 'foobar' }); - }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); - st.end(); - }); - - t.test('parses an iso-8859-1 string if asked to', function (st) { - st.deepEqual(qs.parse('%A2=%BD', { charset: 'iso-8859-1' }), { '¢': '½' }); - st.end(); - }); - - var urlEncodedCheckmarkInUtf8 = '%E2%9C%93'; - var urlEncodedOSlashInUtf8 = '%C3%B8'; - var urlEncodedNumCheckmark = '%26%2310003%3B'; - var urlEncodedNumSmiley = '%26%239786%3B'; - - t.test('prefers an utf-8 charset specified by the utf8 sentinel to a default charset of iso-8859-1', function (st) { - st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'iso-8859-1' }), { ø: 'ø' }); - st.end(); - }); - - t.test('prefers an iso-8859-1 charset specified by the utf8 sentinel to a default charset of utf-8', function (st) { - st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { 'ø': 'ø' }); - st.end(); - }); - - t.test('does not require the utf8 sentinel to be defined before the parameters whose decoding it affects', function (st) { - st.deepEqual(qs.parse('a=' + urlEncodedOSlashInUtf8 + '&utf8=' + urlEncodedNumCheckmark, { charsetSentinel: true, charset: 'utf-8' }), { a: 'ø' }); - st.end(); - }); - - t.test('should ignore an utf8 sentinel with an unknown value', function (st) { - st.deepEqual(qs.parse('utf8=foo&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { ø: 'ø' }); - st.end(); - }); - - t.test('uses the utf8 sentinel to switch to utf-8 when no default charset is given', function (st) { - st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { ø: 'ø' }); - st.end(); - }); - - t.test('uses the utf8 sentinel to 
switch to iso-8859-1 when no default charset is given', function (st) { - st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { 'ø': 'ø' }); - st.end(); - }); - - t.test('interprets numeric entities in iso-8859-1 when `interpretNumericEntities`', function (st) { - st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1', interpretNumericEntities: true }), { foo: '☺' }); - st.end(); - }); - - t.test('handles a custom decoder returning `null`, in the `iso-8859-1` charset, when `interpretNumericEntities`', function (st) { - st.deepEqual(qs.parse('foo=&bar=' + urlEncodedNumSmiley, { - charset: 'iso-8859-1', - decoder: function (str, defaultDecoder, charset) { - return str ? defaultDecoder(str, defaultDecoder, charset) : null; - }, - interpretNumericEntities: true - }), { foo: null, bar: '☺' }); - st.end(); - }); - - t.test('does not interpret numeric entities in iso-8859-1 when `interpretNumericEntities` is absent', function (st) { - st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1' }), { foo: '☺' }); - st.end(); - }); - - t.test('does not interpret numeric entities when the charset is utf-8, even when `interpretNumericEntities`', function (st) { - st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'utf-8', interpretNumericEntities: true }), { foo: '☺' }); - st.end(); - }); - - t.test('does not interpret %uXXXX syntax in iso-8859-1 mode', function (st) { - st.deepEqual(qs.parse('%u263A=%u263A', { charset: 'iso-8859-1' }), { '%u263A': '%u263A' }); - st.end(); - }); - - t.test('allows for decoding keys and values differently', function (st) { - var decoder = function (str, defaultDecoder, charset, type) { - if (type === 'key') { - return defaultDecoder(str, defaultDecoder, charset, type).toLowerCase(); - } - if (type === 'value') { - return defaultDecoder(str, defaultDecoder, charset, type).toUpperCase(); - } - throw 'this should never happen! 
type: ' + type; - }; - - st.deepEqual(qs.parse('KeY=vAlUe', { decoder: decoder }), { key: 'VALUE' }); - st.end(); - }); - - t.end(); -}); diff --git a/node_modules/qs/test/stringify.js b/node_modules/qs/test/stringify.js deleted file mode 100644 index 760d08c..0000000 --- a/node_modules/qs/test/stringify.js +++ /dev/null @@ -1,738 +0,0 @@ -'use strict'; - -var test = require('tape'); -var qs = require('../'); -var utils = require('../lib/utils'); -var iconv = require('iconv-lite'); -var SaferBuffer = require('safer-buffer').Buffer; -var hasSymbols = require('has-symbols'); -var hasBigInt = typeof BigInt === 'function'; - -test('stringify()', function (t) { - t.test('stringifies a querystring object', function (st) { - st.equal(qs.stringify({ a: 'b' }), 'a=b'); - st.equal(qs.stringify({ a: 1 }), 'a=1'); - st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2'); - st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z'); - st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC'); - st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80'); - st.equal(qs.stringify({ a: 'א' }), 'a=%D7%90'); - st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7'); - st.end(); - }); - - t.test('stringifies falsy values', function (st) { - st.equal(qs.stringify(undefined), ''); - st.equal(qs.stringify(null), ''); - st.equal(qs.stringify(null, { strictNullHandling: true }), ''); - st.equal(qs.stringify(false), ''); - st.equal(qs.stringify(0), ''); - st.end(); - }); - - t.test('stringifies symbols', { skip: !hasSymbols() }, function (st) { - st.equal(qs.stringify(Symbol.iterator), ''); - st.equal(qs.stringify([Symbol.iterator]), '0=Symbol%28Symbol.iterator%29'); - st.equal(qs.stringify({ a: Symbol.iterator }), 'a=Symbol%28Symbol.iterator%29'); - st.equal( - qs.stringify({ a: [Symbol.iterator] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - 'a[]=Symbol%28Symbol.iterator%29' - ); - st.end(); - }); - - t.test('stringifies bigints', { skip: !hasBigInt }, function (st) { - var three = BigInt(3); - var encodeWithN = function (value, defaultEncoder, charset) { - var result = defaultEncoder(value, defaultEncoder, charset); - return typeof value === 'bigint' ? 
result + 'n' : result; - }; - st.equal(qs.stringify(three), ''); - st.equal(qs.stringify([three]), '0=3'); - st.equal(qs.stringify([three], { encoder: encodeWithN }), '0=3n'); - st.equal(qs.stringify({ a: three }), 'a=3'); - st.equal(qs.stringify({ a: three }, { encoder: encodeWithN }), 'a=3n'); - st.equal( - qs.stringify({ a: [three] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), - 'a[]=3' - ); - st.equal( - qs.stringify({ a: [three] }, { encodeValuesOnly: true, encoder: encodeWithN, arrayFormat: 'brackets' }), - 'a[]=3n' - ); - st.end(); - }); - - t.test('adds query prefix', function (st) { - st.equal(qs.stringify({ a: 'b' }, { addQueryPrefix: true }), '?a=b'); - st.end(); - }); - - t.test('with query prefix, outputs blank string given an empty object', function (st) { - st.equal(qs.stringify({}, { addQueryPrefix: true }), ''); - st.end(); - }); - - t.test('stringifies nested falsy values', function (st) { - st.equal(qs.stringify({ a: { b: { c: null } } }), 'a%5Bb%5D%5Bc%5D='); - st.equal(qs.stringify({ a: { b: { c: null } } }, { strictNullHandling: true }), 'a%5Bb%5D%5Bc%5D'); - st.equal(qs.stringify({ a: { b: { c: false } } }), 'a%5Bb%5D%5Bc%5D=false'); - st.end(); - }); - - t.test('stringifies a nested object', function (st) { - st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); - st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); - st.end(); - }); - - t.test('stringifies a nested object with dots notation', function (st) { - st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); - st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); - st.end(); - }); - - t.test('stringifies an array value', function (st) { - st.equal( - qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' }), - 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', - 'indices => indices' - ); - st.equal( - qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' }), - 'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d', - 'brackets => brackets' - ); - st.equal( - qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma' }), - 'a=b%2Cc%2Cd', - 'comma => comma' - ); - st.equal( - qs.stringify({ a: ['b', 'c', 'd'] }), - 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', - 'default => indices' - ); - st.end(); - }); - - t.test('omits nulls when asked', function (st) { - st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b'); - st.end(); - }); - - t.test('omits nested nulls when asked', function (st) { - st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c'); - st.end(); - }); - - t.test('omits array indices when asked', function (st) { - st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); - st.end(); - }); - - t.test('stringifies a nested array value', function (st) { - st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'indices' }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); - st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'brackets' }), 'a%5Bb%5D%5B%5D=c&a%5Bb%5D%5B%5D=d'); - st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'comma' }), 'a%5Bb%5D=c%2Cd'); // a[b]=c,d - st.equal(qs.stringify({ a: { b: ['c', 'd'] } }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); - st.end(); - }); - - t.test('stringifies a nested array value with dots notation', function (st) { - st.equal( - qs.stringify( - { a: { b: ['c', 'd'] } }, - { allowDots: true, encode: false, arrayFormat: 'indices' } - ), - 'a.b[0]=c&a.b[1]=d', - 'indices: stringifies with dots + 
indices' - ); - st.equal( - qs.stringify( - { a: { b: ['c', 'd'] } }, - { allowDots: true, encode: false, arrayFormat: 'brackets' } - ), - 'a.b[]=c&a.b[]=d', - 'brackets: stringifies with dots + brackets' - ); - st.equal( - qs.stringify( - { a: { b: ['c', 'd'] } }, - { allowDots: true, encode: false, arrayFormat: 'comma' } - ), - 'a.b=c,d', - 'comma: stringifies with dots + comma' - ); - st.equal( - qs.stringify( - { a: { b: ['c', 'd'] } }, - { allowDots: true, encode: false } - ), - 'a.b[0]=c&a.b[1]=d', - 'default: stringifies with dots + indices' - ); - st.end(); - }); - - t.test('stringifies an object inside an array', function (st) { - st.equal( - qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices' }), - 'a%5B0%5D%5Bb%5D=c', // a[0][b]=c - 'indices => brackets' - ); - st.equal( - qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets' }), - 'a%5B%5D%5Bb%5D=c', // a[][b]=c - 'brackets => brackets' - ); - st.equal( - qs.stringify({ a: [{ b: 'c' }] }), - 'a%5B0%5D%5Bb%5D=c', - 'default => indices' - ); - - st.equal( - qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices' }), - 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', - 'indices => indices' - ); - - st.equal( - qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets' }), - 'a%5B%5D%5Bb%5D%5Bc%5D%5B%5D=1', - 'brackets => brackets' - ); - - st.equal( - qs.stringify({ a: [{ b: { c: [1] } }] }), - 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', - 'default => indices' - ); - - st.end(); - }); - - t.test('stringifies an array with mixed objects and primitives', function (st) { - st.equal( - qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'indices' }), - 'a[0][b]=1&a[1]=2&a[2]=3', - 'indices => indices' - ); - st.equal( - qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'brackets' }), - 'a[][b]=1&a[]=2&a[]=3', - 'brackets => brackets' - ); - st.equal( - qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false }), - 'a[0][b]=1&a[1]=2&a[2]=3', - 'default => indices' - ); - - st.end(); - }); - - t.test('stringifies an object inside an array with dots notation', function (st) { - st.equal( - qs.stringify( - { a: [{ b: 'c' }] }, - { allowDots: true, encode: false, arrayFormat: 'indices' } - ), - 'a[0].b=c', - 'indices => indices' - ); - st.equal( - qs.stringify( - { a: [{ b: 'c' }] }, - { allowDots: true, encode: false, arrayFormat: 'brackets' } - ), - 'a[].b=c', - 'brackets => brackets' - ); - st.equal( - qs.stringify( - { a: [{ b: 'c' }] }, - { allowDots: true, encode: false } - ), - 'a[0].b=c', - 'default => indices' - ); - - st.equal( - qs.stringify( - { a: [{ b: { c: [1] } }] }, - { allowDots: true, encode: false, arrayFormat: 'indices' } - ), - 'a[0].b.c[0]=1', - 'indices => indices' - ); - st.equal( - qs.stringify( - { a: [{ b: { c: [1] } }] }, - { allowDots: true, encode: false, arrayFormat: 'brackets' } - ), - 'a[].b.c[]=1', - 'brackets => brackets' - ); - st.equal( - qs.stringify( - { a: [{ b: { c: [1] } }] }, - { allowDots: true, encode: false } - ), - 'a[0].b.c[0]=1', - 'default => indices' - ); - - st.end(); - }); - - t.test('does not omit object keys when indices = false', function (st) { - st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); - st.end(); - }); - - t.test('uses indices notation for arrays when indices=true', function (st) { - st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); - st.end(); - }); - - t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) { - 
st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); - st.end(); - }); - - t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) { - st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); - st.end(); - }); - - t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) { - st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); - st.end(); - }); - - t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) { - st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); - st.end(); - }); - - t.test('stringifies a complicated object', function (st) { - st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); - st.end(); - }); - - t.test('stringifies an empty value', function (st) { - st.equal(qs.stringify({ a: '' }), 'a='); - st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a'); - - st.equal(qs.stringify({ a: '', b: '' }), 'a=&b='); - st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); - - st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D='); - st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); - st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); - - st.end(); - }); - - t.test('stringifies a null object', { skip: !Object.create }, function (st) { - var obj = Object.create(null); - obj.a = 'b'; - st.equal(qs.stringify(obj), 'a=b'); - st.end(); - }); - - t.test('returns an empty string for invalid input', function (st) { - st.equal(qs.stringify(undefined), ''); - st.equal(qs.stringify(false), ''); - st.equal(qs.stringify(null), ''); - st.equal(qs.stringify(''), ''); - st.end(); - }); - - t.test('stringifies an object with a null object as a child', { skip: !Object.create }, function (st) { - var obj = { a: Object.create(null) }; - - obj.a.b = 'c'; - st.equal(qs.stringify(obj), 'a%5Bb%5D=c'); - st.end(); - }); - - t.test('drops keys with a value of undefined', function (st) { - st.equal(qs.stringify({ a: undefined }), ''); - - st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D'); - st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D='); - st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); - st.end(); - }); - - t.test('url encodes values', function (st) { - st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); - st.end(); - }); - - t.test('stringifies a date', function (st) { - var now = new Date(); - var str = 'a=' + encodeURIComponent(now.toISOString()); - st.equal(qs.stringify({ a: now }), str); - st.end(); - }); - - t.test('stringifies the weird object from qs', function (st) { - st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' 
}), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F'); - st.end(); - }); - - t.test('skips properties that are part of the object prototype', function (st) { - Object.prototype.crash = 'test'; - st.equal(qs.stringify({ a: 'b' }), 'a=b'); - st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); - delete Object.prototype.crash; - st.end(); - }); - - t.test('stringifies boolean values', function (st) { - st.equal(qs.stringify({ a: true }), 'a=true'); - st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true'); - st.equal(qs.stringify({ b: false }), 'b=false'); - st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false'); - st.end(); - }); - - t.test('stringifies buffer values', function (st) { - st.equal(qs.stringify({ a: SaferBuffer.from('test') }), 'a=test'); - st.equal(qs.stringify({ a: { b: SaferBuffer.from('test') } }), 'a%5Bb%5D=test'); - st.end(); - }); - - t.test('stringifies an object using an alternative delimiter', function (st) { - st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); - st.end(); - }); - - t.test('doesn\'t blow up when Buffer global is missing', function (st) { - var tempBuffer = global.Buffer; - delete global.Buffer; - var result = qs.stringify({ a: 'b', c: 'd' }); - global.Buffer = tempBuffer; - st.equal(result, 'a=b&c=d'); - st.end(); - }); - - t.test('selects properties when filter=array', function (st) { - st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); - st.equal(qs.stringify({ a: 1 }, { filter: [] }), ''); - - st.equal( - qs.stringify( - { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, - { filter: ['a', 'b', 0, 2], arrayFormat: 'indices' } - ), - 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', - 'indices => indices' - ); - st.equal( - qs.stringify( - { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, - { filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' } - ), - 'a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3', - 'brackets => brackets' - ); - st.equal( - qs.stringify( - { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, - { filter: ['a', 'b', 0, 2] } - ), - 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', - 'default => indices' - ); - - st.end(); - }); - - t.test('supports custom representations when filter=function', function (st) { - var calls = 0; - var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; - var filterFunc = function (prefix, value) { - calls += 1; - if (calls === 1) { - st.equal(prefix, '', 'prefix is empty'); - st.equal(value, obj); - } else if (prefix === 'c') { - return void 0; - } else if (value instanceof Date) { - st.equal(prefix, 'e[f]'); - return value.getTime(); - } - return value; - }; - - st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); - st.equal(calls, 5); - st.end(); - }); - - t.test('can disable uri encoding', function (st) { - st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b'); - st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); - st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c'); - st.end(); - }); - - t.test('can sort the keys', function (st) { - var sort = function (a, b) { - return a.localeCompare(b); - }; - st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); - st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a'); - st.end(); - }); - - t.test('can sort the keys at depth 3 or more too', function (st) { - var sort = function (a, b) { - return a.localeCompare(b); - }; - st.equal( - qs.stringify( - { a: 'a', z: { 
zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, - { sort: sort, encode: false } - ), - 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb' - ); - st.equal( - qs.stringify( - { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, - { sort: null, encode: false } - ), - 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b' - ); - st.end(); - }); - - t.test('can stringify with custom encoding', function (st) { - st.equal(qs.stringify({ 県: '大阪府', '': '' }, { - encoder: function (str) { - if (str.length === 0) { - return ''; - } - var buf = iconv.encode(str, 'shiftjis'); - var result = []; - for (var i = 0; i < buf.length; ++i) { - result.push(buf.readUInt8(i).toString(16)); - } - return '%' + result.join('%'); - } - }), '%8c%a7=%91%e5%8d%e3%95%7b&='); - st.end(); - }); - - t.test('receives the default encoder as a second argument', function (st) { - st.plan(2); - qs.stringify({ a: 1 }, { - encoder: function (str, defaultEncoder) { - st.equal(defaultEncoder, utils.encode); - } - }); - st.end(); - }); - - t.test('throws error with wrong encoder', function (st) { - st['throws'](function () { - qs.stringify({}, { encoder: 'string' }); - }, new TypeError('Encoder has to be a function.')); - st.end(); - }); - - t.test('can use custom encoder for a buffer object', { skip: typeof Buffer === 'undefined' }, function (st) { - st.equal(qs.stringify({ a: SaferBuffer.from([1]) }, { - encoder: function (buffer) { - if (typeof buffer === 'string') { - return buffer; - } - return String.fromCharCode(buffer.readUInt8(0) + 97); - } - }), 'a=b'); - - st.equal(qs.stringify({ a: SaferBuffer.from('a b') }, { - encoder: function (buffer) { - return buffer; - } - }), 'a=a b'); - st.end(); - }); - - t.test('serializeDate option', function (st) { - var date = new Date(); - st.equal( - qs.stringify({ a: date }), - 'a=' + date.toISOString().replace(/:/g, '%3A'), - 'default is toISOString' - ); - - var mutatedDate = new Date(); - mutatedDate.toISOString = function () { - throw new SyntaxError(); - }; - st['throws'](function () { - mutatedDate.toISOString(); - }, SyntaxError); - st.equal( - qs.stringify({ a: mutatedDate }), - 'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'), - 'toISOString works even when method is not locally present' - ); - - var specificDate = new Date(6); - st.equal( - qs.stringify( - { a: specificDate }, - { serializeDate: function (d) { return d.getTime() * 7; } } - ), - 'a=42', - 'custom serializeDate function called' - ); - - st.end(); - }); - - t.test('RFC 1738 spaces serialization', function (st) { - st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC1738 }), 'a=b+c'); - st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC1738 }), 'a+b=c+d'); - st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }, { format: qs.formats.RFC1738 }), 'a+b=a+b'); - st.end(); - }); - - t.test('RFC 3986 spaces serialization', function (st) { - st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC3986 }), 'a=b%20c'); - st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC3986 }), 'a%20b=c%20d'); - st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }, { format: qs.formats.RFC3986 }), 'a%20b=a%20b'); - st.end(); - }); - - t.test('Backward compatibility to RFC 3986', function (st) { - st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); - st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }), 'a%20b=a%20b'); - st.end(); - }); - - t.test('Edge cases and unknown 
formats', function (st) { - ['UFO1234', false, 1234, null, {}, []].forEach( - function (format) { - st['throws']( - function () { - qs.stringify({ a: 'b c' }, { format: format }); - }, - new TypeError('Unknown format option provided.') - ); - } - ); - st.end(); - }); - - t.test('encodeValuesOnly', function (st) { - st.equal( - qs.stringify( - { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, - { encodeValuesOnly: true } - ), - 'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h' - ); - st.equal( - qs.stringify( - { a: 'b', c: ['d', 'e'], f: [['g'], ['h']] } - ), - 'a=b&c%5B0%5D=d&c%5B1%5D=e&f%5B0%5D%5B0%5D=g&f%5B1%5D%5B0%5D=h' - ); - st.end(); - }); - - t.test('encodeValuesOnly - strictNullHandling', function (st) { - st.equal( - qs.stringify( - { a: { b: null } }, - { encodeValuesOnly: true, strictNullHandling: true } - ), - 'a[b]' - ); - st.end(); - }); - - t.test('throws if an invalid charset is specified', function (st) { - st['throws'](function () { - qs.stringify({ a: 'b' }, { charset: 'foobar' }); - }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); - st.end(); - }); - - t.test('respects a charset of iso-8859-1', function (st) { - st.equal(qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }), '%E6=%E6'); - st.end(); - }); - - t.test('encodes unrepresentable chars as numeric entities in iso-8859-1 mode', function (st) { - st.equal(qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' }), 'a=%26%239786%3B'); - st.end(); - }); - - t.test('respects an explicit charset of utf-8 (the default)', function (st) { - st.equal(qs.stringify({ a: 'æ' }, { charset: 'utf-8' }), 'a=%C3%A6'); - st.end(); - }); - - t.test('adds the right sentinel when instructed to and the charset is utf-8', function (st) { - st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'utf-8' }), 'utf8=%E2%9C%93&a=%C3%A6'); - st.end(); - }); - - t.test('adds the right sentinel when instructed to and the charset is iso-8859-1', function (st) { - st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' }), 'utf8=%26%2310003%3B&a=%E6'); - st.end(); - }); - - t.test('does not mutate the options argument', function (st) { - var options = {}; - qs.stringify({}, options); - st.deepEqual(options, {}); - st.end(); - }); - - t.test('strictNullHandling works with custom filter', function (st) { - var filter = function (prefix, value) { - return value; - }; - - var options = { strictNullHandling: true, filter: filter }; - st.equal(qs.stringify({ key: null }, options), 'key'); - st.end(); - }); - - t.test('strictNullHandling works with null serializeDate', function (st) { - var serializeDate = function () { - return null; - }; - var options = { strictNullHandling: true, serializeDate: serializeDate }; - var date = new Date(); - st.equal(qs.stringify({ key: date }, options), 'key'); - st.end(); - }); - - t.test('allows for encoding keys and values differently', function (st) { - var encoder = function (str, defaultEncoder, charset, type) { - if (type === 'key') { - return defaultEncoder(str, defaultEncoder, charset, type).toLowerCase(); - } - if (type === 'value') { - return defaultEncoder(str, defaultEncoder, charset, type).toUpperCase(); - } - throw 'this should never happen! 
type: ' + type; - }; - - st.deepEqual(qs.stringify({ KeY: 'vAlUe' }, { encoder: encoder }), 'key=VALUE'); - st.end(); - }); - - t.end(); -}); diff --git a/node_modules/qs/test/utils.js b/node_modules/qs/test/utils.js deleted file mode 100644 index aa84dfd..0000000 --- a/node_modules/qs/test/utils.js +++ /dev/null @@ -1,136 +0,0 @@ -'use strict'; - -var test = require('tape'); -var inspect = require('object-inspect'); -var SaferBuffer = require('safer-buffer').Buffer; -var forEach = require('for-each'); -var utils = require('../lib/utils'); - -test('merge()', function (t) { - t.deepEqual(utils.merge(null, true), [null, true], 'merges true into null'); - - t.deepEqual(utils.merge(null, [42]), [null, 42], 'merges null into an array'); - - t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' }), { a: ['b', 'c'] }, 'merges two objects with the same key'); - - var oneMerged = utils.merge({ foo: 'bar' }, { foo: { first: '123' } }); - t.deepEqual(oneMerged, { foo: ['bar', { first: '123' }] }, 'merges a standalone and an object into an array'); - - var twoMerged = utils.merge({ foo: ['bar', { first: '123' }] }, { foo: { second: '456' } }); - t.deepEqual(twoMerged, { foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }, 'merges a standalone and two objects into an array'); - - var sandwiched = utils.merge({ foo: ['bar', { first: '123', second: '456' }] }, { foo: 'baz' }); - t.deepEqual(sandwiched, { foo: ['bar', { first: '123', second: '456' }, 'baz'] }, 'merges an object sandwiched by two standalones into an array'); - - var nestedArrays = utils.merge({ foo: ['baz'] }, { foo: ['bar', 'xyzzy'] }); - t.deepEqual(nestedArrays, { foo: ['baz', 'bar', 'xyzzy'] }); - - var noOptionsNonObjectSource = utils.merge({ foo: 'baz' }, 'bar'); - t.deepEqual(noOptionsNonObjectSource, { foo: 'baz', bar: true }); - - t.test( - 'avoids invoking array setters unnecessarily', - { skip: typeof Object.defineProperty !== 'function' }, - function (st) { - var setCount = 0; - var getCount = 0; - var observed = []; - Object.defineProperty(observed, 0, { - get: function () { - getCount += 1; - return { bar: 'baz' }; - }, - set: function () { setCount += 1; } - }); - utils.merge(observed, [null]); - st.equal(setCount, 0); - st.equal(getCount, 1); - observed[0] = observed[0]; // eslint-disable-line no-self-assign - st.equal(setCount, 1); - st.equal(getCount, 2); - st.end(); - } - ); - - t.end(); -}); - -test('assign()', function (t) { - var target = { a: 1, b: 2 }; - var source = { b: 3, c: 4 }; - var result = utils.assign(target, source); - - t.equal(result, target, 'returns the target'); - t.deepEqual(target, { a: 1, b: 3, c: 4 }, 'target and source are merged'); - t.deepEqual(source, { b: 3, c: 4 }, 'source is untouched'); - - t.end(); -}); - -test('combine()', function (t) { - t.test('both arrays', function (st) { - var a = [1]; - var b = [2]; - var combined = utils.combine(a, b); - - st.deepEqual(a, [1], 'a is not mutated'); - st.deepEqual(b, [2], 'b is not mutated'); - st.notEqual(a, combined, 'a !== combined'); - st.notEqual(b, combined, 'b !== combined'); - st.deepEqual(combined, [1, 2], 'combined is a + b'); - - st.end(); - }); - - t.test('one array, one non-array', function (st) { - var aN = 1; - var a = [aN]; - var bN = 2; - var b = [bN]; - - var combinedAnB = utils.combine(aN, b); - st.deepEqual(b, [bN], 'b is not mutated'); - st.notEqual(aN, combinedAnB, 'aN + b !== aN'); - st.notEqual(a, combinedAnB, 'aN + b !== a'); - st.notEqual(bN, combinedAnB, 'aN + b !== bN'); - st.notEqual(b, combinedAnB, 'aN + b !== b'); - 
st.deepEqual([1, 2], combinedAnB, 'first argument is array-wrapped when not an array'); - - var combinedABn = utils.combine(a, bN); - st.deepEqual(a, [aN], 'a is not mutated'); - st.notEqual(aN, combinedABn, 'a + bN !== aN'); - st.notEqual(a, combinedABn, 'a + bN !== a'); - st.notEqual(bN, combinedABn, 'a + bN !== bN'); - st.notEqual(b, combinedABn, 'a + bN !== b'); - st.deepEqual([1, 2], combinedABn, 'second argument is array-wrapped when not an array'); - - st.end(); - }); - - t.test('neither is an array', function (st) { - var combined = utils.combine(1, 2); - st.notEqual(1, combined, '1 + 2 !== 1'); - st.notEqual(2, combined, '1 + 2 !== 2'); - st.deepEqual([1, 2], combined, 'both arguments are array-wrapped when not an array'); - - st.end(); - }); - - t.end(); -}); - -test('isBuffer()', function (t) { - forEach([null, undefined, true, false, '', 'abc', 42, 0, NaN, {}, [], function () {}, /a/g], function (x) { - t.equal(utils.isBuffer(x), false, inspect(x) + ' is not a buffer'); - }); - - var fakeBuffer = { constructor: Buffer }; - t.equal(utils.isBuffer(fakeBuffer), false, 'fake buffer is not a buffer'); - - var saferBuffer = SaferBuffer.from('abc'); - t.equal(utils.isBuffer(saferBuffer), true, 'SaferBuffer instance is a buffer'); - - var buffer = Buffer.from && Buffer.alloc ? Buffer.from('abc') : new Buffer('abc'); - t.equal(utils.isBuffer(buffer), true, 'real Buffer instance is a buffer'); - t.end(); -}); diff --git a/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/readable-stream/CONTRIBUTING.md deleted file mode 100644 index f478d58..0000000 --- a/node_modules/readable-stream/CONTRIBUTING.md +++ /dev/null @@ -1,38 +0,0 @@ -# Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -* (a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - -* (b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -* (c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - -* (d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. - -## Moderation Policy - -The [Node.js Moderation Policy] applies to this WG. - -## Code of Conduct - -The [Node.js Code of Conduct][] applies to this WG. - -[Node.js Code of Conduct]: -https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md -[Node.js Moderation Policy]: -https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/readable-stream/GOVERNANCE.md b/node_modules/readable-stream/GOVERNANCE.md deleted file mode 100644 index 16ffb93..0000000 --- a/node_modules/readable-stream/GOVERNANCE.md +++ /dev/null @@ -1,136 +0,0 @@ -### Streams Working Group - -The Node.js Streams is jointly governed by a Working Group -(WG) -that is responsible for high-level guidance of the project. 
- -The WG has final authority over this project including: - -* Technical direction -* Project governance and process (including this policy) -* Contribution policy -* GitHub repository hosting -* Conduct guidelines -* Maintaining the list of additional Collaborators - -For the current list of WG members, see the project -[README.md](./README.md#current-project-team-members). - -### Collaborators - -The readable-stream GitHub repository is -maintained by the WG and additional Collaborators who are added by the -WG on an ongoing basis. - -Individuals making significant and valuable contributions are made -Collaborators and given commit-access to the project. These -individuals are identified by the WG and their addition as -Collaborators is discussed during the WG meeting. - -_Note:_ If you make a significant contribution and are not considered -for commit-access log an issue or contact a WG member directly and it -will be brought up in the next WG meeting. - -Modifications of the contents of the readable-stream repository are -made on -a collaborative basis. Anybody with a GitHub account may propose a -modification via pull request and it will be considered by the project -Collaborators. All pull requests must be reviewed and accepted by a -Collaborator with sufficient expertise who is able to take full -responsibility for the change. In the case of pull requests proposed -by an existing Collaborator, an additional Collaborator is required -for sign-off. Consensus should be sought if additional Collaborators -participate and there is disagreement around a particular -modification. See _Consensus Seeking Process_ below for further detail -on the consensus model used for governance. - -Collaborators may opt to elevate significant or controversial -modifications, or modifications that have not found consensus to the -WG for discussion by assigning the ***WG-agenda*** tag to a pull -request or issue. The WG should serve as the final arbiter where -required. - -For the current list of Collaborators, see the project -[README.md](./README.md#members). - -### WG Membership - -WG seats are not time-limited. There is no fixed size of the WG. -However, the expected target is between 6 and 12, to ensure adequate -coverage of important areas of expertise, balanced with the ability to -make decisions efficiently. - -There is no specific set of requirements or qualifications for WG -membership beyond these rules. - -The WG may add additional members to the WG by unanimous consensus. - -A WG member may be removed from the WG by voluntary resignation, or by -unanimous consensus of all other WG members. - -Changes to WG membership should be posted in the agenda, and may be -suggested as any other agenda item (see "WG Meetings" below). - -If an addition or removal is proposed during a meeting, and the full -WG is not in attendance to participate, then the addition or removal -is added to the agenda for the subsequent meeting. This is to ensure -that all members are given the opportunity to participate in all -membership decisions. If a WG member is unable to attend a meeting -where a planned membership decision is being made, then their consent -is assumed. - -No more than 1/3 of the WG members may be affiliated with the same -employer. 
If removal or resignation of a WG member, or a change of -employment by a WG member, creates a situation where more than 1/3 of -the WG membership shares an employer, then the situation must be -immediately remedied by the resignation or removal of one or more WG -members affiliated with the over-represented employer(s). - -### WG Meetings - -The WG meets occasionally on a Google Hangout On Air. A designated moderator -approved by the WG runs the meeting. Each meeting should be -published to YouTube. - -Items are added to the WG agenda that are considered contentious or -are modifications of governance, contribution policy, WG membership, -or release process. - -The intention of the agenda is not to approve or review all patches; -that should happen continuously on GitHub and be handled by the larger -group of Collaborators. - -Any community member or contributor can ask that something be added to -the next meeting's agenda by logging a GitHub Issue. Any Collaborator, -WG member or the moderator can add the item to the agenda by adding -the ***WG-agenda*** tag to the issue. - -Prior to each WG meeting the moderator will share the Agenda with -members of the WG. WG members can add any items they like to the -agenda at the beginning of each meeting. The moderator and the WG -cannot veto or remove items. - -The WG may invite persons or representatives from certain projects to -participate in a non-voting capacity. - -The moderator is responsible for summarizing the discussion of each -agenda item and sends it as a pull request after the meeting. - -### Consensus Seeking Process - -The WG follows a -[Consensus -Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) -decision-making model. - -When an agenda item has appeared to reach a consensus the moderator -will ask "Does anyone object?" as a final call for dissent from the -consensus. - -If an agenda item cannot reach a consensus a WG member can call for -either a closing vote or a vote to table the issue to the next -meeting. The call for a vote must be seconded by a majority of the WG -or else the discussion will continue. Simple majority wins. - -Note that changes to WG membership require a majority consensus. See -"WG Membership" above. diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE deleted file mode 100644 index 2873b3b..0000000 --- a/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,47 +0,0 @@ -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md deleted file mode 100644 index 6f035ab..0000000 --- a/node_modules/readable-stream/README.md +++ /dev/null @@ -1,106 +0,0 @@ -# readable-stream - -***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) - -```bash -npm install --save readable-stream -``` - -This package is a mirror of the streams implementations in Node.js. - -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). - -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. - -## Version 3.x.x - -v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: - -1. 
Error codes: https://github.com/nodejs/node/pull/13310, - https://github.com/nodejs/node/pull/13291, - https://github.com/nodejs/node/pull/16589, - https://github.com/nodejs/node/pull/15042, - https://github.com/nodejs/node/pull/15665, - https://github.com/nodejs/readable-stream/pull/344 -2. 'readable' have precedence over flowing - https://github.com/nodejs/node/pull/18994 -3. make virtual methods errors consistent - https://github.com/nodejs/node/pull/18813 -4. updated streams error handling - https://github.com/nodejs/node/pull/18438 -5. writable.end should return this. - https://github.com/nodejs/node/pull/18780 -6. readable continues to read when push('') - https://github.com/nodejs/node/pull/18211 -7. add custom inspect to BufferList - https://github.com/nodejs/node/pull/17907 -8. always defer 'readable' with nextTick - https://github.com/nodejs/node/pull/17979 - -## Version 2.x.x -v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. - -### Big Thanks - -Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] - -# Usage - -You can swap your `require('stream')` with `require('readable-stream')` -without any changes, if you are just using one of the main classes and -functions. - -```js -const { - Readable, - Writable, - Transform, - Duplex, - pipeline, - finished -} = require('readable-stream') -```` - -Note that `require('stream')` will return `Stream`, while -`require('readable-stream')` will return `Readable`. We discourage using -whatever is exported directly, but rather use one of the properties as -shown in the example above. - -# Streams Working Group - -`readable-stream` is maintained by the Streams Working Group, which -oversees the development and maintenance of the Streams API within -Node.js. The responsibilities of the Streams Working Group include: - -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. -* Redirecting changes to streams from the Node.js project to this - project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance - notice of changes. 
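> Editor's aside on the Usage section above: a minimal sketch (not taken from the package) of the recommended pattern of consuming only the named exports of `readable-stream` rather than whatever the module exports directly. It assumes `readable-stream` v3 is installed; all stream names below are illustrative.

```js
'use strict'
// Sketch: build a tiny source -> transform -> sink pipeline using the named
// exports, as the Usage section above recommends.
const { Readable, Transform, Writable, pipeline } = require('readable-stream')

const source = new Readable({
  read () {
    this.push('hello readable-stream\n')
    this.push(null) // end of data
  }
})

const upper = new Transform({
  transform (chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase())
  }
})

const sink = new Writable({
  write (chunk, encoding, callback) {
    process.stdout.write(chunk)
    callback()
  }
})

pipeline(source, upper, sink, (err) => {
  if (err) console.error('pipeline failed', err)
  else console.log('pipeline done')
})
```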
- - -## Team Members - -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> - - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E -* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> -* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> - -[sauce]: https://saucelabs.com diff --git a/node_modules/readable-stream/errors-browser.js b/node_modules/readable-stream/errors-browser.js deleted file mode 100644 index fb8e73e..0000000 --- a/node_modules/readable-stream/errors-browser.js +++ /dev/null @@ -1,127 +0,0 @@ -'use strict'; - -function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } - -var codes = {}; - -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error; - } - - function getMessage(arg1, arg2, arg3) { - if (typeof message === 'string') { - return message; - } else { - return message(arg1, arg2, arg3); - } - } - - var NodeError = - /*#__PURE__*/ - function (_Base) { - _inheritsLoose(NodeError, _Base); - - function NodeError(arg1, arg2, arg3) { - return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; - } - - return NodeError; - }(Base); - - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; - codes[code] = NodeError; -} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js - - -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - var len = expected.length; - expected = expected.map(function (i) { - return String(i); - }); - - if (len > 2) { - return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; - } else if (len === 2) { - return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); - } else { - return "of ".concat(thing, " ").concat(expected[0]); - } - } else { - return "of ".concat(thing, " ").concat(String(expected)); - } -} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith - - -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; -} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith - - -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - - return str.substring(this_len - search.length, this_len) === search; -} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes - - -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; - } - - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; - } -} - -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"'; -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - var determiner; - - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } - - var msg; - - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); - } else { - var type = includes(name, '.') ? 'property' : 'argument'; - msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); - } - - msg += ". Received type ".concat(typeof actual); - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented'; -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg; -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); -module.exports.codes = codes; diff --git a/node_modules/readable-stream/errors.js b/node_modules/readable-stream/errors.js deleted file mode 100644 index 8471526..0000000 --- a/node_modules/readable-stream/errors.js +++ /dev/null @@ -1,116 +0,0 @@ -'use strict'; - -const codes = {}; - -function createErrorType(code, message, Base) { - if (!Base) { - Base = Error - } - - function getMessage (arg1, arg2, arg3) { - if (typeof message === 'string') { - return message - } else { - return message(arg1, arg2, arg3) - } - } - - class NodeError extends Base { - constructor (arg1, arg2, arg3) { - super(getMessage(arg1, arg2, arg3)); - } - } - - NodeError.prototype.name = Base.name; - NodeError.prototype.code = code; - - codes[code] = NodeError; -} - -// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js -function oneOf(expected, thing) { - if (Array.isArray(expected)) { - const len = expected.length; - expected = expected.map((i) => String(i)); - if (len > 2) { - return `one of ${thing} ${expected.slice(0, len - 
1).join(', ')}, or ` + - expected[len - 1]; - } else if (len === 2) { - return `one of ${thing} ${expected[0]} or ${expected[1]}`; - } else { - return `of ${thing} ${expected[0]}`; - } - } else { - return `of ${thing} ${String(expected)}`; - } -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith -function startsWith(str, search, pos) { - return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith -function endsWith(str, search, this_len) { - if (this_len === undefined || this_len > str.length) { - this_len = str.length; - } - return str.substring(this_len - search.length, this_len) === search; -} - -// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes -function includes(str, search, start) { - if (typeof start !== 'number') { - start = 0; - } - - if (start + search.length > str.length) { - return false; - } else { - return str.indexOf(search, start) !== -1; - } -} - -createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { - return 'The value "' + value + '" is invalid for option "' + name + '"' -}, TypeError); -createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { - // determiner: 'must be' or 'must not be' - let determiner; - if (typeof expected === 'string' && startsWith(expected, 'not ')) { - determiner = 'must not be'; - expected = expected.replace(/^not /, ''); - } else { - determiner = 'must be'; - } - - let msg; - if (endsWith(name, ' argument')) { - // For cases like 'first argument' - msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; - } else { - const type = includes(name, '.') ? 'property' : 'argument'; - msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; - } - - msg += `. Received type ${typeof actual}`; - return msg; -}, TypeError); -createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); -createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { - return 'The ' + name + ' method is not implemented' -}); -createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); -createErrorType('ERR_STREAM_DESTROYED', function (name) { - return 'Cannot call ' + name + ' after a stream was destroyed'; -}); -createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); -createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); -createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); -createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); -createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { - return 'Unknown encoding: ' + arg -}, TypeError); -createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - -module.exports.codes = codes; diff --git a/node_modules/readable-stream/experimentalWarning.js b/node_modules/readable-stream/experimentalWarning.js deleted file mode 100644 index 78e8414..0000000 --- a/node_modules/readable-stream/experimentalWarning.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -var experimentalWarnings = new Set(); - -function emitExperimentalWarning(feature) { - if (experimentalWarnings.has(feature)) return; - var msg = feature + ' is an experimental feature. 
This feature could ' + - 'change at any time'; - experimentalWarnings.add(feature); - process.emitWarning(msg, 'ExperimentalWarning'); -} - -function noop() {} - -module.exports.emitExperimentalWarning = process.emitWarning - ? emitExperimentalWarning - : noop; diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js deleted file mode 100644 index 6752519..0000000 --- a/node_modules/readable-stream/lib/_stream_duplex.js +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. -'use strict'; -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - - for (var key in obj) { - keys.push(key); - } - - return keys; -}; -/**/ - - -module.exports = Duplex; - -var Readable = require('./_stream_readable'); - -var Writable = require('./_stream_writable'); - -require('inherits')(Duplex, Readable); - -{ - // Allow the keys array to be GC'ed. 
- var keys = objectKeys(Writable.prototype); - - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - this.allowHalfOpen = true; - - if (options) { - if (options.readable === false) this.readable = false; - if (options.writable === false) this.writable = false; - - if (options.allowHalfOpen === false) { - this.allowHalfOpen = false; - this.once('end', onend); - } - } -} - -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); -Object.defineProperty(Duplex.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); -Object.defineProperty(Duplex.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; - } -}); // the no-half-open enforcer - -function onend() { - // If the writable side ended, then we're ok. - if (this._writableState.ended) return; // no more data can be written. - // But allow more writes to happen in this tick. - - process.nextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -Object.defineProperty(Duplex.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._readableState.destroyed = value; - this._writableState.destroyed = value; - } -}); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index 32e7414..0000000 --- a/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -require('inherits')(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 192d451..0000000 --- a/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,1124 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
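> Editor's aside on `_stream_passthrough.js` above: because every written chunk is emitted unchanged, PassThrough is commonly used as a tap in the middle of a pipe chain. A rough sketch, not part of the package; the file paths are placeholders.

```js
'use strict'
// Sketch: count the bytes flowing through a pipeline without altering them.
const fs = require('fs')
const { PassThrough } = require('readable-stream')

const tap = new PassThrough()
let bytes = 0
tap.on('data', (chunk) => { bytes += chunk.length })
tap.on('end', () => console.log('copied %d bytes', bytes))

// 'input.txt' and 'copy.txt' are hypothetical paths, for illustration only.
fs.createReadStream('input.txt')
  .pipe(tap)
  .pipe(fs.createWriteStream('copy.txt'))
```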
-'use strict'; - -module.exports = Readable; -/**/ - -var Duplex; -/**/ - -Readable.ReadableState = ReadableState; -/**/ - -var EE = require('events').EventEmitter; - -var EElistenerCount = function EElistenerCount(emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ - - -var Stream = require('./internal/streams/stream'); -/**/ - - -var Buffer = require('buffer').Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} - -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} -/**/ - - -var debugUtil = require('util'); - -var debug; - -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function debug() {}; -} -/**/ - - -var BufferList = require('./internal/streams/buffer_list'); - -var destroyImpl = require('./internal/streams/destroy'); - -var _require = require('./internal/streams/state'), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = require('../errors').codes, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. - - -var StringDecoder; -var createReadableStreamAsyncIterator; -var from; - -require('inherits')(Readable, Stream); - -var errorOrDestroy = destroyImpl.errorOrDestroy; -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; - -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} - -function ReadableState(options, stream, isDuplex) { - Duplex = Duplex || require('./_stream_duplex'); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. 
Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - - this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - - this.sync = true; // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.paused = true; // Should close be emitted on destroy. Defaults to true. - - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') - - this.autoDestroy = !!options.autoDestroy; // has it been destroyed - - this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- - this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s - - this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled - - this.readingMore = false; - this.decoder = null; - this.encoding = null; - - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5 - - var isDuplex = this instanceof Duplex; - this._readableState = new ReadableState(options, this, isDuplex); // legacy - - this.readable = true; - - if (options) { - if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } - - Stream.call(this); -} - -Object.defineProperty(Readable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._readableState === undefined) { - return false; - } - - return this._readableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._readableState.destroyed = value; - } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; - -Readable.prototype._destroy = function (err, cb) { - cb(err); -}; // Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. 
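> Editor's aside on the push() comment above: a sketch (using only the public API, not this file's internals) of a producer that honors the return value, pushing until push() reports the buffer is full and then waiting for the next read request.

```js
'use strict'
// Sketch: push() returns false once the internal buffer reaches highWaterMark,
// so a well-behaved producer stops and resumes on the next _read() call.
const { Readable } = require('readable-stream')

let n = 0
const numbers = new Readable({
  highWaterMark: 1024,
  read () {
    let keepGoing = true
    while (keepGoing && n < 10000) {
      keepGoing = this.push(`${n++}\n`) // false => buffer full, stop for now
    }
    if (n >= 10000) this.push(null) // end of stream
  }
})

numbers.pipe(process.stdout)
```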
- - -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; - } - - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; - } - - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; // Unshift should *always* be something directly out of read() - - -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; - -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - debug('readableAddChunk', chunk); - var state = stream._readableState; - - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - - if (er) { - errorOrDestroy(stream, er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (addToFront) { - if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); - } else if (state.destroyed) { - return false; - } else { - state.reading = false; - - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - maybeReadMore(stream, state); - } - } // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. - - - return !state.ended && (state.length < state.highWaterMark || state.length === 0); -} - -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - state.awaitDrain = 0; - stream.emit('data', chunk); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); - } - - maybeReadMore(stream, state); -} - -function chunkInvalid(state, chunk) { - var er; - - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); - } - - return er; -} - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; // backwards compatibility. 
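> Editor's aside on the unshift() comment above ("unshift should always be something directly out of read()"): a parser that over-reads while hunting for a delimiter can put the excess back on the front of the buffer. The `readLine` helper below is hypothetical, not part of readable-stream.

```js
'use strict'
// Hypothetical helper: read one '\n'-terminated line, unshifting any extra
// bytes that came out of the buffer along with it.
const { Readable } = require('readable-stream')

function readLine (stream) {
  const chunk = stream.read()
  if (chunk === null) return null
  const text = chunk.toString('utf8')
  const nl = text.indexOf('\n')
  if (nl === -1) {
    stream.unshift(chunk) // no complete line yet: put it all back
    return null
  }
  const rest = text.slice(nl + 1)
  if (rest.length > 0) stream.unshift(Buffer.from(rest, 'utf8')) // keep remainder
  return text.slice(0, nl)
}

const src = new Readable({ read () {} })
src.push('first line\nsecond line (incomplete')
src.once('readable', () => {
  console.log('line:', readLine(src)) // => 'first line'
})
```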
- - -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - var decoder = new StringDecoder(enc); - this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 - - this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: - - var p = this._readableState.buffer.head; - var content = ''; - - while (p !== null) { - content += decoder.write(p.data); - p = p.next; - } - - this._readableState.buffer.clear(); - - if (content !== '') this._readableState.buffer.push(content); - this._readableState.length = content.length; - return this; -}; // Don't raise the hwm > 1GB - - -var MAX_HWM = 0x40000000; - -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - - return n; -} // This function is designed to be inlinable, so please take care when making -// changes to the function body. - - -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } // If we're asking for more than the current hwm, then raise the hwm. - - - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; // Don't have enough - - if (!state.ended) { - state.needReadable = true; - return 0; - } - - return state.length; -} // you can override either this method, or the async _read(n) below. - - -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - - if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. - - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. 
Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. - - - var doRead = state.needReadable; - debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some - - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - - - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; // if the length is currently zero, then we *need* a readable event. - - if (state.length === 0) state.needReadable = true; // call internal read method - - this._read(state.highWaterMark); - - state.sync = false; // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - - if (!state.reading) n = howMuchToRead(nOrig, state); - } - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark; - n = 0; - } else { - state.length -= n; - state.awaitDrain = 0; - } - - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. - - if (nOrig !== n && state.ended) endReadable(this); - } - - if (ret !== null) this.emit('data', ret); - return ret; -}; - -function onEofChunk(stream, state) { - debug('onEofChunk'); - if (state.ended) return; - - if (state.decoder) { - var chunk = state.decoder.end(); - - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - - state.ended = true; - - if (state.sync) { - // if we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call - emitReadable(stream); - } else { - // emit 'readable' now to make sure it gets picked up. - state.needReadable = false; - - if (!state.emittedReadable) { - state.emittedReadable = true; - emitReadable_(stream); - } - } -} // Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. 
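> Editor's aside: for reference, the consumer-side pattern that the 'readable' bookkeeping above supports looks roughly like this (a sketch, not taken from the package): wait for 'readable', then drain with read() until it returns null.

```js
'use strict'
// Paused-mode consumption: 'readable' fires again whenever more data (or EOF)
// becomes available, so the read() loop never busy-waits.
const { Readable } = require('readable-stream')

const src = new Readable({
  read () {
    this.push('one ')
    this.push('two ')
    this.push(null)
  }
})

src.on('readable', () => {
  let chunk
  while ((chunk = src.read()) !== null) {
    console.log('read %d bytes', chunk.length)
  }
})

src.on('end', () => console.log('done'))
```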
- - -function emitReadable(stream) { - var state = stream._readableState; - debug('emitReadable', state.needReadable, state.emittedReadable); - state.needReadable = false; - - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - process.nextTick(emitReadable_, stream); - } -} - -function emitReadable_(stream) { - var state = stream._readableState; - debug('emitReadable_', state.destroyed, state.length, state.ended); - - if (!state.destroyed && (state.length || state.ended)) { - stream.emit('readable'); - state.emittedReadable = false; - } // The stream needs another readable event if - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - - - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; - flow(stream); -} // at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. - - -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - process.nextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { - var len = state.length; - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) // didn't get any data, stop spinning. - break; - } - - state.readingMore = false; -} // abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. 
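> Editor's aside on the comment above, which describes _read() as the abstract method implementation classes override: a rough sketch of that contract in subclass form. `CountStream` is a made-up example class, not part of readable-stream.

```js
'use strict'
// Sketch: override _read() and call push() with data, then push(null) at EOF.
const { Readable } = require('readable-stream')

class CountStream extends Readable {
  constructor (limit, options) {
    super(options)
    this.limit = limit
    this.current = 0
  }

  _read () {
    if (this.current >= this.limit) {
      this.push(null) // no more data
    } else {
      this.push(String(this.current++) + '\n')
    }
  }
}

new CountStream(5).pipe(process.stdout)
```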
- - -Readable.prototype._read = function (n) { - errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - - case 1: - state.pipes = [state.pipes, dest]; - break; - - default: - state.pipes.push(dest); - break; - } - - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); - dest.on('unpipe', onunpipe); - - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - - function onend() { - debug('onend'); - dest.end(); - } // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - - - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - var cleanedUp = false; - - function cleanup() { - debug('cleanup'); // cleanup event handlers once the pipe is broken - - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - debug('dest.write', ret); - - if (ret === false) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; - } - - src.pause(); - } - } // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - - - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); - } // Make sure our error handler is attached before userland ones. - - - prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
- - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - - dest.once('close', onclose); - - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } // tell the dest that it's being piped to - - - dest.emit('pipe', src); // start the flow if it hasn't been started already. - - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function pipeOnDrainFunctionResult() { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { - hasUnpiped: false - }; // if we're not piping anywhere, then do nothing. - - if (state.pipesCount === 0) return this; // just one destination. most common case. - - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; // got a match. - - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; - } // slow case. multiple pipe destinations. - - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { - hasUnpiped: false - }); - } - - return this; - } // try to find the right one. - - - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; // set up data events if they are asked for -// Ensure readable listeners eventually get something - - -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - var state = this._readableState; - - if (ev === 'data') { - // update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused - - if (state.flowing !== false) this.resume(); - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.flowing = false; - state.emittedReadable = false; - debug('on readable', state.length, state.reading); - - if (state.length) { - emitReadable(this); - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this); - } - } - } - - return res; -}; - -Readable.prototype.addListener = Readable.prototype.on; - -Readable.prototype.removeListener = function (ev, fn) { - var res = Stream.prototype.removeListener.call(this, ev, fn); - - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. 
This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - - return res; -}; - -Readable.prototype.removeAllListeners = function (ev) { - var res = Stream.prototype.removeAllListeners.apply(this, arguments); - - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this); - } - - return res; -}; - -function updateReadableListening(self) { - var state = self._readableState; - state.readableListening = self.listenerCount('readable') > 0; - - if (state.resumeScheduled && !state.paused) { - // flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true; // crude way to check if we should resume - } else if (self.listenerCount('data') > 0) { - self.resume(); - } -} - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} // pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. - - -Readable.prototype.resume = function () { - var state = this._readableState; - - if (!state.flowing) { - debug('resume'); // we flow only if there is no one listening - // for readable, but we still have to call - // resume() - - state.flowing = !state.readableListening; - resume(this, state); - } - - state.paused = false; - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - process.nextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - debug('resume', state.reading); - - if (!state.reading) { - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - - if (this._readableState.flowing !== false) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - - this._readableState.paused = true; - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - - while (state.flowing && stream.read() !== null) { - ; - } -} // wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. - - -Readable.prototype.wrap = function (stream) { - var _this = this; - - var state = this._readableState; - var paused = false; - stream.on('end', function () { - debug('wrapped end'); - - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } - - _this.push(null); - }); - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode - - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = _this.push(chunk); - - if (!ret) { - paused = true; - stream.pause(); - } - }); // proxy all the other methods. 
- // important when wrapping filters and duplexes. - - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function methodWrap(method) { - return function methodWrapReturnFunction() { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } // proxy certain important events. - - - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } // when we try to consume some more bytes, simply unpause the - // underlying stream. - - - this._read = function (n) { - debug('wrapped _read', n); - - if (paused) { - paused = false; - stream.resume(); - } - }; - - return this; -}; - -if (typeof Symbol === 'function') { - Readable.prototype[Symbol.asyncIterator] = function () { - if (createReadableStreamAsyncIterator === undefined) { - createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); - } - - return createReadableStreamAsyncIterator(this); - }; -} - -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.highWaterMark; - } -}); -Object.defineProperty(Readable.prototype, 'readableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState && this._readableState.buffer; - } -}); -Object.defineProperty(Readable.prototype, 'readableFlowing', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.flowing; - }, - set: function set(state) { - if (this._readableState) { - this._readableState.flowing = state; - } - } -}); // exposed for testing purposes only. - -Readable._fromList = fromList; -Object.defineProperty(Readable.prototype, 'readableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._readableState.length; - } -}); // Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. - -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = state.buffer.consume(n, state.decoder); - } - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - debug('endReadable', state.endEmitted); - - if (!state.endEmitted) { - state.ended = true; - process.nextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
- - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well - var wState = stream._writableState; - - if (!wState || wState.autoDestroy && wState.finished) { - stream.destroy(); - } - } - } -} - -if (typeof Symbol === 'function') { - Readable.from = function (iterable, opts) { - if (from === undefined) { - from = require('./internal/streams/from'); - } - - return from(Readable, iterable, opts); - }; -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - - return -1; -} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index 41a738c..0000000 --- a/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. 
If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. -'use strict'; - -module.exports = Transform; - -var _require$codes = require('../errors').codes, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, - ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; - -var Duplex = require('./_stream_duplex'); - -require('inherits')(Transform, Duplex); - -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - var cb = ts.writecb; - - if (cb === null) { - return this.emit('error', new ERR_MULTIPLE_CALLBACK()); - } - - ts.writechunk = null; - ts.writecb = null; - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); - cb(er); - var rs = this._readableState; - rs.reading = false; - - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; // start out asking for a readable event once data is transformed. - - this._readableState.needReadable = true; // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - - this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } // When the writable side finishes, then flush out anything remaining. 
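The comment block above spells out the Transform contract: written chunks are buffered until the readable side calls _read, and acknowledging a chunk via the callback is what lets back-pressure propagate upstream. A minimal sketch against that contract (an assumed usage example, not taken from the deleted sources) could look like:

```js
// Assumed usage sketch of the Transform contract described above.
const { Transform } = require('readable-stream')

const upper = new Transform({
  transform (chunk, encoding, callback) {
    // callback(null, data) pushes to the readable side and releases the
    // buffered write, which is what drives back-pressure from the reader.
    callback(null, chunk.toString().toUpperCase())
  }
})

process.stdin.pipe(upper).pipe(process.stdout)
```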
- - - this.on('prefinish', prefinish); -} - -function prefinish() { - var _this = this; - - if (typeof this._flush === 'function' && !this._readableState.destroyed) { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); - } -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; // This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. - - -Transform.prototype._transform = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; // Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. - - -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && !ts.transforming) { - ts.transforming = true; - - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -Transform.prototype._destroy = function (err, cb) { - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - }); -}; - -function done(stream, er, data) { - if (er) return stream.emit('error', er); - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); // TODO(BridgeAR): Write a test for these two error cases - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - - if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); - if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); - return stream.push(null); -} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index a2634d7..0000000 --- a/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,697 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. -'use strict'; - -module.exports = Writable; -/* */ - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} // It seems a linked list but it is not -// there will be only 2 of these for each stream - - -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ - -/**/ - - -var Duplex; -/**/ - -Writable.WritableState = WritableState; -/**/ - -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ - -var Stream = require('./internal/streams/stream'); -/**/ - - -var Buffer = require('buffer').Buffer; - -var OurUint8Array = global.Uint8Array || function () {}; - -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} - -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} - -var destroyImpl = require('./internal/streams/destroy'); - -var _require = require('./internal/streams/state'), - getHighWaterMark = _require.getHighWaterMark; - -var _require$codes = require('../errors').codes, - ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, - ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; - -var errorOrDestroy = destroyImpl.errorOrDestroy; - -require('inherits')(Writable, Stream); - -function nop() {} - -function WritableState(options, stream, isDuplex) { - Duplex = Duplex || require('./_stream_duplex'); - options = options || {}; // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream - // contains buffers or objects. - - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - - this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called - - this.finalCalled = false; // drain event flag. 
- - this.needDrain = false; // at the start of calling end() - - this.ending = false; // when end() has been called, and returned - - this.ended = false; // when 'finish' is emitted - - this.finished = false; // has it been destroyed - - this.destroyed = false; // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - - this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - - this.length = 0; // a flag to see when we're in the middle of a write. - - this.writing = false; // when true all writes will be buffered until .uncork() call - - this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - - this.sync = true; // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - - this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) - - this.onwrite = function (er) { - onwrite(stream, er); - }; // the callback that the user supplies to write(chunk,encoding,cb) - - - this.writecb = null; // the amount that is being written when _write is called. - - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - - this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - - this.prefinished = false; // True if the error was already emitted and should not be thrown again - - this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. - - this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') - - this.autoDestroy = !!options.autoDestroy; // count buffered requests - - this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - - this.corkedRequestsFree = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - - while (current) { - out.push(current); - current = current.next; - } - - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function writableStateBufferGetter() { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); // Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. 
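The header of this file states the implementer contract: provide an async _write(chunk, encoding, cb) and the class handles buffering and 'drain' emission. A small sketch of that contract in use (assumed example, not part of the deleted file):

```js
// Assumed example: a Writable supplied with only a write() implementation.
const { Writable } = require('readable-stream')

const sink = new Writable({
  write (chunk, encoding, callback) {
    // simulate an async destination; calling back releases the next chunk
    setImmediate(function () {
      console.log('wrote %d bytes', chunk.length)
      callback()
    })
  }
})

sink.end('hello world')
```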
- - -var realHasInstance; - -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function value(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; - } - }); -} else { - realHasInstance = function realHasInstance(object) { - return object instanceof this; - }; -} - -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5 - - var isDuplex = this instanceof Duplex; - if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); - this._writableState = new WritableState(options, this, isDuplex); // legacy. - - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; - } - - Stream.call(this); -} // Otherwise people can pipe Writable streams, which is just wrong. - - -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); -}; - -function writeAfterEnd(stream, cb) { - var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb - - errorOrDestroy(stream, er); - process.nextTick(cb, er); -} // Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. 
- - -function validChunk(stream, state, chunk, cb) { - var er; - - if (chunk === null) { - er = new ERR_STREAM_NULL_VALUES(); - } else if (typeof chunk !== 'string' && !state.objectMode) { - er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); - } - - if (er) { - errorOrDestroy(stream, er); - process.nextTick(cb, er); - return false; - } - - return true; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - var isBuf = !state.objectMode && _isUint8Array(chunk); - - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); - } - return ret; -}; - -Writable.prototype.cork = function () { - this._writableState.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; - -Object.defineProperty(Writable.prototype, 'writableBuffer', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState && this._writableState.getBuffer(); - } -}); - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); - } - - return chunk; -} - -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.highWaterMark; - } -}); // if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. - -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } - } - - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
- - if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - process.nextTick(cb, er); // this can emit finish, and it will always happen - // after error - - process.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - errorOrDestroy(stream, er); // this can emit finish, but finish must - // always follow error - - finishMaybe(stream, state); - } -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state) || stream.destroyed; - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - process.nextTick(afterWrite, stream, state, finished, cb); - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} // Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. 
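The comment above explains why the write callback is deferred to nextTick: 'drain' must not fire before write() has returned false and the caller has had a chance to attach a listener. The producer side of that contract (an assumed example, not from the deleted source) looks roughly like:

```js
// Assumed example: a producer that honours write()'s false return and 'drain'.
function writeMany (writable, total, callback) {
  let i = 0
  function writeSome () {
    while (i < total) {
      const ok = writable.write('chunk ' + i++ + '\n')
      if (!ok) {
        // over highWaterMark: pause until the buffer drains
        writable.once('drain', writeSome)
        return
      }
    }
    writable.end(callback)
  }
  writeSome()
}
```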
- - -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} // if there's something in the buffer waiting, then process it - - -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - - state.pendingcb++; - state.lastBufferedRequest = null; - - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - - if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks - - if (state.corked) { - state.corked = 1; - this.uncork(); - } // ignore unnecessary end() calls. 
- - - if (!state.ending) endWritable(this, state, cb); - return this; -}; - -Object.defineProperty(Writable.prototype, 'writableLength', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - return this._writableState.length; - } -}); - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - - if (err) { - errorOrDestroy(stream, err); - } - - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} - -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.pendingcb++; - state.finalCalled = true; - process.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - - if (need) { - prefinish(stream, state); - - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); - - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well - var rState = stream._readableState; - - if (!rState || rState.autoDestroy && rState.endEmitted) { - stream.destroy(); - } - } - } - } - - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - - if (cb) { - if (state.finished) process.nextTick(cb);else stream.once('finish', cb); - } - - state.ended = true; - stream.writable = false; -} - -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } // reuse the free corkReq. 
- - - state.corkedRequestsFree.next = corkReq; -} - -Object.defineProperty(Writable.prototype, 'destroyed', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function get() { - if (this._writableState === undefined) { - return false; - } - - return this._writableState.destroyed; - }, - set: function set(value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } // backward compatibility, the user is explicitly - // managing destroyed - - - this._writableState.destroyed = value; - } -}); -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; - -Writable.prototype._destroy = function (err, cb) { - cb(err); -}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/node_modules/readable-stream/lib/internal/streams/async_iterator.js deleted file mode 100644 index 9fb615a..0000000 --- a/node_modules/readable-stream/lib/internal/streams/async_iterator.js +++ /dev/null @@ -1,207 +0,0 @@ -'use strict'; - -var _Object$setPrototypeO; - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var finished = require('./end-of-stream'); - -var kLastResolve = Symbol('lastResolve'); -var kLastReject = Symbol('lastReject'); -var kError = Symbol('error'); -var kEnded = Symbol('ended'); -var kLastPromise = Symbol('lastPromise'); -var kHandlePromise = Symbol('handlePromise'); -var kStream = Symbol('stream'); - -function createIterResult(value, done) { - return { - value: value, - done: done - }; -} - -function readAndResolve(iter) { - var resolve = iter[kLastResolve]; - - if (resolve !== null) { - var data = iter[kStream].read(); // we defer if data is null - // we can be expecting either 'end' or - // 'error' - - if (data !== null) { - iter[kLastPromise] = null; - iter[kLastResolve] = null; - iter[kLastReject] = null; - resolve(createIterResult(data, false)); - } - } -} - -function onReadable(iter) { - // we wait for the next tick, because it might - // emit an error with process.nextTick - process.nextTick(readAndResolve, iter); -} - -function wrapForNext(lastPromise, iter) { - return function (resolve, reject) { - lastPromise.then(function () { - if (iter[kEnded]) { - resolve(createIterResult(undefined, true)); - return; - } - - iter[kHandlePromise](resolve, reject); - }, reject); - }; -} - -var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); -var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { - get stream() { - return this[kStream]; - }, - - next: function next() { - var _this = this; - - // if we have detected an error in the meanwhile - // reject straight away - var error = this[kError]; - - if (error !== null) { - return Promise.reject(error); - } - - if (this[kEnded]) { - return Promise.resolve(createIterResult(undefined, true)); - } - - if (this[kStream].destroyed) { - // We need to defer via nextTick because if .destroy(err) is - // called, the error will be emitted via nextTick, and - // we cannot guarantee that there is no error lingering around - // waiting to be emitted. 
- return new Promise(function (resolve, reject) { - process.nextTick(function () { - if (_this[kError]) { - reject(_this[kError]); - } else { - resolve(createIterResult(undefined, true)); - } - }); - }); - } // if we have multiple next() calls - // we will wait for the previous Promise to finish - // this logic is optimized to support for await loops, - // where next() is only called once at a time - - - var lastPromise = this[kLastPromise]; - var promise; - - if (lastPromise) { - promise = new Promise(wrapForNext(lastPromise, this)); - } else { - // fast path needed to support multiple this.push() - // without triggering the next() queue - var data = this[kStream].read(); - - if (data !== null) { - return Promise.resolve(createIterResult(data, false)); - } - - promise = new Promise(this[kHandlePromise]); - } - - this[kLastPromise] = promise; - return promise; - } -}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { - return this; -}), _defineProperty(_Object$setPrototypeO, "return", function _return() { - var _this2 = this; - - // destroy(err, cb) is a private API - // we can guarantee we have that here, because we control the - // Readable class this is attached to - return new Promise(function (resolve, reject) { - _this2[kStream].destroy(null, function (err) { - if (err) { - reject(err); - return; - } - - resolve(createIterResult(undefined, true)); - }); - }); -}), _Object$setPrototypeO), AsyncIteratorPrototype); - -var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { - var _Object$create; - - var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { - value: stream, - writable: true - }), _defineProperty(_Object$create, kLastResolve, { - value: null, - writable: true - }), _defineProperty(_Object$create, kLastReject, { - value: null, - writable: true - }), _defineProperty(_Object$create, kError, { - value: null, - writable: true - }), _defineProperty(_Object$create, kEnded, { - value: stream._readableState.endEmitted, - writable: true - }), _defineProperty(_Object$create, kHandlePromise, { - value: function value(resolve, reject) { - var data = iterator[kStream].read(); - - if (data) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(data, false)); - } else { - iterator[kLastResolve] = resolve; - iterator[kLastReject] = reject; - } - }, - writable: true - }), _Object$create)); - iterator[kLastPromise] = null; - finished(stream, function (err) { - if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise - // returned by next() and store the error - - if (reject !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - reject(err); - } - - iterator[kError] = err; - return; - } - - var resolve = iterator[kLastResolve]; - - if (resolve !== null) { - iterator[kLastPromise] = null; - iterator[kLastResolve] = null; - iterator[kLastReject] = null; - resolve(createIterResult(undefined, true)); - } - - iterator[kEnded] = true; - }); - stream.on('readable', onReadable.bind(null, iterator)); - return iterator; -}; - -module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
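The iterator above bridges 'readable', 'end' and 'error' into promises so a Readable can be consumed with for await. The consumer side this enables (an assumed example, not taken from the deleted sources) is roughly:

```js
// Assumed example: consuming a Readable via the async iterator defined above.
const { Readable } = require('readable-stream')

async function collect (stream) {
  const chunks = []
  for await (const chunk of stream) {
    chunks.push(chunk)
  }
  return Buffer.concat(chunks)
}

collect(Readable.from([Buffer.from('read'), Buffer.from('able')]))
  .then(function (buf) { console.log(buf.toString()) }) // prints: readable
```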
deleted file mode 100644 index cdea425..0000000 --- a/node_modules/readable-stream/lib/internal/streams/buffer_list.js +++ /dev/null @@ -1,210 +0,0 @@ -'use strict'; - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } - -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } - -var _require = require('buffer'), - Buffer = _require.Buffer; - -var _require2 = require('util'), - inspect = _require2.inspect; - -var custom = inspect && inspect.custom || 'inspect'; - -function copyBuffer(src, target, offset) { - Buffer.prototype.copy.call(src, target, offset); -} - -module.exports = -/*#__PURE__*/ -function () { - function BufferList() { - _classCallCheck(this, BufferList); - - this.head = null; - this.tail = null; - this.length = 0; - } - - _createClass(BufferList, [{ - key: "push", - value: function push(v) { - var entry = { - data: v, - next: null - }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - } - }, { - key: "unshift", - value: function unshift(v) { - var entry = { - data: v, - next: this.head - }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - } - }, { - key: "shift", - value: function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - } - }, { - key: "clear", - value: function clear() { - this.head = this.tail = null; - this.length = 0; - } - }, { - key: "join", - value: function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - - while (p = p.next) { - ret += s + p.data; - } - - return ret; - } - }, { - key: "concat", - value: function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = 
this.head; - var i = 0; - - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - - return ret; - } // Consumes a specified amount of bytes or characters from the buffered data. - - }, { - key: "consume", - value: function consume(n, hasStrings) { - var ret; - - if (n < this.head.data.length) { - // `slice` is the same for buffers and strings. - ret = this.head.data.slice(0, n); - this.head.data = this.head.data.slice(n); - } else if (n === this.head.data.length) { - // First chunk is a perfect match. - ret = this.shift(); - } else { - // Result spans more than one buffer. - ret = hasStrings ? this._getString(n) : this._getBuffer(n); - } - - return ret; - } - }, { - key: "first", - value: function first() { - return this.head.data; - } // Consumes a specified amount of characters from the buffered data. - - }, { - key: "_getString", - value: function _getString(n) { - var p = this.head; - var c = 1; - var ret = p.data; - n -= ret.length; - - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = str.slice(nb); - } - - break; - } - - ++c; - } - - this.length -= c; - return ret; - } // Consumes a specified amount of bytes from the buffered data. - - }, { - key: "_getBuffer", - value: function _getBuffer(n) { - var ret = Buffer.allocUnsafe(n); - var p = this.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) this.head = p.next;else this.head = this.tail = null; - } else { - this.head = p; - p.data = buf.slice(nb); - } - - break; - } - - ++c; - } - - this.length -= c; - return ret; - } // Make sure the linked list only shows the minimal necessary information. - - }, { - key: custom, - value: function value(_, options) { - return inspect(this, _objectSpread({}, options, { - // Only inspect one level. - depth: 0, - // It should not recurse. 
- customInspect: false - })); - } - }]); - - return BufferList; -}(); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js deleted file mode 100644 index 3268a16..0000000 --- a/node_modules/readable-stream/lib/internal/streams/destroy.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; // undocumented cb() API, needed for core, not for public API - -function destroy(err, cb) { - var _this = this; - - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - process.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - process.nextTick(emitErrorNT, this, err); - } - } - - return this; - } // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - - - if (this._readableState) { - this._readableState.destroyed = true; - } // if this is a duplex stream mark the writable part as destroyed as well - - - if (this._writableState) { - this._writableState.destroyed = true; - } - - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - process.nextTick(emitErrorAndCloseNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - process.nextTick(emitErrorAndCloseNT, _this, err); - } else { - process.nextTick(emitCloseNT, _this); - } - } else if (cb) { - process.nextTick(emitCloseNT, _this); - cb(err); - } else { - process.nextTick(emitCloseNT, _this); - } - }); - - return this; -} - -function emitErrorAndCloseNT(self, err) { - emitErrorNT(self, err); - emitCloseNT(self); -} - -function emitCloseNT(self) { - if (self._writableState && !self._writableState.emitClose) return; - if (self._readableState && !self._readableState.emitClose) return; - self.emit('close'); -} - -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; - } - - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } -} - -function emitErrorNT(self, err) { - self.emit('error', err); -} - -function errorOrDestroy(stream, err) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. 
- var rState = stream._readableState; - var wState = stream._writableState; - if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); -} - -module.exports = { - destroy: destroy, - undestroy: undestroy, - errorOrDestroy: errorOrDestroy -}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js deleted file mode 100644 index 831f286..0000000 --- a/node_modules/readable-stream/lib/internal/streams/end-of-stream.js +++ /dev/null @@ -1,104 +0,0 @@ -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). -'use strict'; - -var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; - -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - - for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { - args[_key] = arguments[_key]; - } - - callback.apply(this, args); - }; -} - -function noop() {} - -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} - -function eos(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - callback = once(callback || noop); - var readable = opts.readable || opts.readable !== false && stream.readable; - var writable = opts.writable || opts.writable !== false && stream.writable; - - var onlegacyfinish = function onlegacyfinish() { - if (!stream.writable) onfinish(); - }; - - var writableEnded = stream._writableState && stream._writableState.finished; - - var onfinish = function onfinish() { - writable = false; - writableEnded = true; - if (!readable) callback.call(stream); - }; - - var readableEnded = stream._readableState && stream._readableState.endEmitted; - - var onend = function onend() { - readable = false; - readableEnded = true; - if (!writable) callback.call(stream); - }; - - var onerror = function onerror(err) { - callback.call(stream, err); - }; - - var onclose = function onclose() { - var err; - - if (readable && !readableEnded) { - if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - - if (writable && !writableEnded) { - if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); - return callback.call(stream, err); - } - }; - - var onrequest = function onrequest() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest();else stream.on('request', onrequest); - } else if (writable && !stream._writableState) { - // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - return function () { - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('end', 
onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -} - -module.exports = eos; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/from-browser.js b/node_modules/readable-stream/lib/internal/streams/from-browser.js deleted file mode 100644 index a4ce56f..0000000 --- a/node_modules/readable-stream/lib/internal/streams/from-browser.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = function () { - throw new Error('Readable.from is not available in the browser') -}; diff --git a/node_modules/readable-stream/lib/internal/streams/from.js b/node_modules/readable-stream/lib/internal/streams/from.js deleted file mode 100644 index 6c41284..0000000 --- a/node_modules/readable-stream/lib/internal/streams/from.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } - -function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } - -function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - -var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; - -function from(Readable, iterable, opts) { - var iterator; - - if (iterable && typeof iterable.next === 'function') { - iterator = iterable; - } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); - - var readable = new Readable(_objectSpread({ - objectMode: true - }, opts)); // Reading boolean to protect against _read - // being called before last iteration completion. 
- - var reading = false; - - readable._read = function () { - if (!reading) { - reading = true; - next(); - } - }; - - function next() { - return _next2.apply(this, arguments); - } - - function _next2() { - _next2 = _asyncToGenerator(function* () { - try { - var _ref = yield iterator.next(), - value = _ref.value, - done = _ref.done; - - if (done) { - readable.push(null); - } else if (readable.push((yield value))) { - next(); - } else { - reading = false; - } - } catch (err) { - readable.destroy(err); - } - }); - return _next2.apply(this, arguments); - } - - return readable; -} - -module.exports = from; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/readable-stream/lib/internal/streams/pipeline.js deleted file mode 100644 index 6589909..0000000 --- a/node_modules/readable-stream/lib/internal/streams/pipeline.js +++ /dev/null @@ -1,97 +0,0 @@ -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). -'use strict'; - -var eos; - -function once(callback) { - var called = false; - return function () { - if (called) return; - called = true; - callback.apply(void 0, arguments); - }; -} - -var _require$codes = require('../../../errors').codes, - ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; - -function noop(err) { - // Rethrow the error if it exists to avoid swallowing it - if (err) throw err; -} - -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -} - -function destroyer(stream, reading, writing, callback) { - callback = once(callback); - var closed = false; - stream.on('close', function () { - closed = true; - }); - if (eos === undefined) eos = require('./end-of-stream'); - eos(stream, { - readable: reading, - writable: writing - }, function (err) { - if (err) return callback(err); - closed = true; - callback(); - }); - var destroyed = false; - return function (err) { - if (closed) return; - if (destroyed) return; - destroyed = true; // request.destroy just do .end - .abort is what we want - - if (isRequest(stream)) return stream.abort(); - if (typeof stream.destroy === 'function') return stream.destroy(); - callback(err || new ERR_STREAM_DESTROYED('pipe')); - }; -} - -function call(fn) { - fn(); -} - -function pipe(from, to) { - return from.pipe(to); -} - -function popCallback(streams) { - if (!streams.length) return noop; - if (typeof streams[streams.length - 1] !== 'function') return noop; - return streams.pop(); -} - -function pipeline() { - for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { - streams[_key] = arguments[_key]; - } - - var callback = popCallback(streams); - if (Array.isArray(streams[0])) streams = streams[0]; - - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams'); - } - - var error; - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1; - var writing = i > 0; - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err; - if (err) destroys.forEach(call); - if (reading) return; - destroys.forEach(call); - callback(error); - }); - }); - return streams.reduce(pipe); -} - -module.exports = pipeline; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/state.js b/node_modules/readable-stream/lib/internal/streams/state.js deleted file mode 100644 index 19887eb..0000000 --- 
a/node_modules/readable-stream/lib/internal/streams/state.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict'; - -var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; - -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; -} - -function getHighWaterMark(state, options, duplexKey, isDuplex) { - var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); - - if (hwm != null) { - if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { - var name = isDuplex ? duplexKey : 'highWaterMark'; - throw new ERR_INVALID_OPT_VALUE(name, hwm); - } - - return Math.floor(hwm); - } // Default value - - - return state.objectMode ? 16 : 16 * 1024; -} - -module.exports = { - getHighWaterMark: getHighWaterMark -}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/readable-stream/lib/internal/streams/stream-browser.js deleted file mode 100644 index 9332a3f..0000000 --- a/node_modules/readable-stream/lib/internal/streams/stream-browser.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('events').EventEmitter; diff --git a/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/readable-stream/lib/internal/streams/stream.js deleted file mode 100644 index ce2ad5b..0000000 --- a/node_modules/readable-stream/lib/internal/streams/stream.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('stream'); diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json deleted file mode 100644 index 5ab4f78..0000000 --- a/node_modules/readable-stream/package.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "_from": "readable-stream@^3.1.1", - "_id": "readable-stream@3.6.0", - "_inBundle": false, - "_integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "_location": "/readable-stream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "readable-stream@^3.1.1", - "name": "readable-stream", - "escapedName": "readable-stream", - "rawSpec": "^3.1.1", - "saveSpec": null, - "fetchSpec": "^3.1.1" - }, - "_requiredBy": [ - "/duplexify", - "/htmlparser2", - "/stream-http" - ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "_shasum": "337bbda3adc0706bd3e024426a286d4b4b2c9198", - "_spec": "readable-stream@^3.1.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/duplexify", - "browser": { - "util": false, - "worker_threads": false, - "./errors": "./errors-browser.js", - "./readable.js": "./readable-browser.js", - "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", - "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" - }, - "bundleDependencies": false, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "deprecated": false, - "description": "Streams3, a user-land copy of the stream library from Node.js", - "devDependencies": { - "@babel/cli": "^7.2.0", - "@babel/core": "^7.2.0", - "@babel/polyfill": "^7.0.0", - "@babel/preset-env": "^7.2.0", - "airtap": "0.0.9", - "assert": "^1.4.0", - "bl": "^2.0.0", - "deep-strict-equal": "^0.2.0", - "events.once": "^2.0.2", - "glob": "^7.1.2", - "gunzip-maybe": "^1.4.1", - "hyperquest": 
"^2.1.3", - "lolex": "^2.6.0", - "nyc": "^11.0.0", - "pump": "^3.0.0", - "rimraf": "^2.6.2", - "tap": "^12.0.0", - "tape": "^4.9.0", - "tar-fs": "^1.16.2", - "util-promisify": "^2.1.0" - }, - "engines": { - "node": ">= 6" - }, - "homepage": "https://github.com/nodejs/readable-stream#readme", - "keywords": [ - "readable", - "stream", - "pipe" - ], - "license": "MIT", - "main": "readable.js", - "name": "readable-stream", - "nyc": { - "include": [ - "lib/**.js" - ] - }, - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream.git" - }, - "scripts": { - "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", - "cover": "nyc npm test", - "report": "nyc report --reporter=lcov", - "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", - "test-browser-local": "airtap --open --local -- test/browser.js", - "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", - "update-browser-errors": "babel -o errors-browser.js errors.js" - }, - "version": "3.6.0" -} diff --git a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js deleted file mode 100644 index adbf60d..0000000 --- a/node_modules/readable-stream/readable-browser.js +++ /dev/null @@ -1,9 +0,0 @@ -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); -exports.finished = require('./lib/internal/streams/end-of-stream.js'); -exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js deleted file mode 100644 index 9e0ca12..0000000 --- a/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,16 +0,0 @@ -var Stream = require('stream'); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream.Readable; - Object.assign(module.exports, Stream); - module.exports.Stream = Stream; -} else { - exports = module.exports = require('./lib/_stream_readable.js'); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = require('./lib/_stream_writable.js'); - exports.Duplex = require('./lib/_stream_duplex.js'); - exports.Transform = require('./lib/_stream_transform.js'); - exports.PassThrough = require('./lib/_stream_passthrough.js'); - exports.finished = require('./lib/internal/streams/end-of-stream.js'); - exports.pipeline = require('./lib/internal/streams/pipeline.js'); -} diff --git a/node_modules/safe-buffer/LICENSE b/node_modules/safe-buffer/LICENSE deleted file mode 100644 index 0c068ce..0000000 --- a/node_modules/safe-buffer/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial 
portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/safe-buffer/README.md b/node_modules/safe-buffer/README.md deleted file mode 100644 index 356e351..0000000 --- a/node_modules/safe-buffer/README.md +++ /dev/null @@ -1,586 +0,0 @@ -# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg -[travis-url]: https://travis-ci.org/feross/safe-buffer -[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg -[npm-url]: https://npmjs.org/package/safe-buffer -[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg -[downloads-url]: https://npmjs.org/package/safe-buffer -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -#### Safer Node.js Buffer API - -**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, -`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** - -**Uses the built-in implementation when available.** - -## install - -``` -npm install safe-buffer -``` - -[Get supported safe-buffer with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-safe-buffer?utm_source=npm-safe-buffer&utm_medium=referral&utm_campaign=readme) - -## usage - -The goal of this package is to provide a safe replacement for the node.js `Buffer`. - -It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to -the top of your node.js modules: - -```js -var Buffer = require('safe-buffer').Buffer - -// Existing buffer code will continue to work without issues: - -new Buffer('hey', 'utf8') -new Buffer([1, 2, 3], 'utf8') -new Buffer(obj) -new Buffer(16) // create an uninitialized buffer (potentially unsafe) - -// But you can use these new explicit APIs to make clear what you want: - -Buffer.from('hey', 'utf8') // convert from many types to a Buffer -Buffer.alloc(16) // create a zero-filled buffer (safe) -Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) -``` - -## api - -### Class Method: Buffer.from(array) - - -* `array` {Array} - -Allocates a new `Buffer` using an `array` of octets. - -```js -const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); - // creates a new Buffer containing ASCII bytes - // ['b','u','f','f','e','r'] -``` - -A `TypeError` will be thrown if `array` is not an `Array`. - -### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) - - -* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or - a `new ArrayBuffer()` -* `byteOffset` {Number} Default: `0` -* `length` {Number} Default: `arrayBuffer.length - byteOffset` - -When passed a reference to the `.buffer` property of a `TypedArray` instance, -the newly created `Buffer` will share the same allocated memory as the -TypedArray. 
- -```js -const arr = new Uint16Array(2); -arr[0] = 5000; -arr[1] = 4000; - -const buf = Buffer.from(arr.buffer); // shares the memory with arr; - -console.log(buf); - // Prints: - -// changing the TypedArray changes the Buffer also -arr[1] = 6000; - -console.log(buf); - // Prints: -``` - -The optional `byteOffset` and `length` arguments specify a memory range within -the `arrayBuffer` that will be shared by the `Buffer`. - -```js -const ab = new ArrayBuffer(10); -const buf = Buffer.from(ab, 0, 2); -console.log(buf.length); - // Prints: 2 -``` - -A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. - -### Class Method: Buffer.from(buffer) - - -* `buffer` {Buffer} - -Copies the passed `buffer` data onto a new `Buffer` instance. - -```js -const buf1 = Buffer.from('buffer'); -const buf2 = Buffer.from(buf1); - -buf1[0] = 0x61; -console.log(buf1.toString()); - // 'auffer' -console.log(buf2.toString()); - // 'buffer' (copy is not changed) -``` - -A `TypeError` will be thrown if `buffer` is not a `Buffer`. - -### Class Method: Buffer.from(str[, encoding]) - - -* `str` {String} String to encode. -* `encoding` {String} Encoding to use, Default: `'utf8'` - -Creates a new `Buffer` containing the given JavaScript string `str`. If -provided, the `encoding` parameter identifies the character encoding. -If not provided, `encoding` defaults to `'utf8'`. - -```js -const buf1 = Buffer.from('this is a tést'); -console.log(buf1.toString()); - // prints: this is a tést -console.log(buf1.toString('ascii')); - // prints: this is a tC)st - -const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); -console.log(buf2.toString()); - // prints: this is a tést -``` - -A `TypeError` will be thrown if `str` is not a string. - -### Class Method: Buffer.alloc(size[, fill[, encoding]]) - - -* `size` {Number} -* `fill` {Value} Default: `undefined` -* `encoding` {String} Default: `utf8` - -Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the -`Buffer` will be *zero-filled*. - -```js -const buf = Buffer.alloc(5); -console.log(buf); - // -``` - -The `size` must be less than or equal to the value of -`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is -`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will -be created if a `size` less than or equal to 0 is specified. - -If `fill` is specified, the allocated `Buffer` will be initialized by calling -`buf.fill(fill)`. See [`buf.fill()`][] for more information. - -```js -const buf = Buffer.alloc(5, 'a'); -console.log(buf); - // -``` - -If both `fill` and `encoding` are specified, the allocated `Buffer` will be -initialized by calling `buf.fill(fill, encoding)`. For example: - -```js -const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); -console.log(buf); - // -``` - -Calling `Buffer.alloc(size)` can be significantly slower than the alternative -`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance -contents will *never contain sensitive data*. - -A `TypeError` will be thrown if `size` is not a number. - -### Class Method: Buffer.allocUnsafe(size) - - -* `size` {Number} - -Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must -be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit -architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is -thrown. A zero-length Buffer will be created if a `size` less than or equal to -0 is specified. 
- -The underlying memory for `Buffer` instances created in this way is *not -initialized*. The contents of the newly created `Buffer` are unknown and -*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such -`Buffer` instances to zeroes. - -```js -const buf = Buffer.allocUnsafe(5); -console.log(buf); - // - // (octets will be different, every time) -buf.fill(0); -console.log(buf); - // -``` - -A `TypeError` will be thrown if `size` is not a number. - -Note that the `Buffer` module pre-allocates an internal `Buffer` instance of -size `Buffer.poolSize` that is used as a pool for the fast allocation of new -`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated -`new Buffer(size)` constructor) only when `size` is less than or equal to -`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default -value of `Buffer.poolSize` is `8192` but can be modified. - -Use of this pre-allocated internal memory pool is a key difference between -calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. -Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer -pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal -Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The -difference is subtle but can be important when an application requires the -additional performance that `Buffer.allocUnsafe(size)` provides. - -### Class Method: Buffer.allocUnsafeSlow(size) - - -* `size` {Number} - -Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The -`size` must be less than or equal to the value of -`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is -`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will -be created if a `size` less than or equal to 0 is specified. - -The underlying memory for `Buffer` instances created in this way is *not -initialized*. The contents of the newly created `Buffer` are unknown and -*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such -`Buffer` instances to zeroes. - -When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, -allocations under 4KB are, by default, sliced from a single pre-allocated -`Buffer`. This allows applications to avoid the garbage collection overhead of -creating many individually allocated Buffers. This approach improves both -performance and memory usage by eliminating the need to track and cleanup as -many `Persistent` objects. - -However, in the case where a developer may need to retain a small chunk of -memory from a pool for an indeterminate amount of time, it may be appropriate -to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then -copy out the relevant bits. - -```js -// need to keep around a few small chunks of memory -const store = []; - -socket.on('readable', () => { - const data = socket.read(); - // allocate for retained data - const sb = Buffer.allocUnsafeSlow(10); - // copy the data into the new allocation - data.copy(sb, 0, 0, 10); - store.push(sb); -}); -``` - -Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* -a developer has observed undue memory retention in their applications. - -A `TypeError` will be thrown if `size` is not a number. - -### All the Rest - -The rest of the `Buffer` API is exactly the same as in node.js. -[See the docs](https://nodejs.org/api/buffer.html). 
- - -## Related links - -- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) -- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) - -## Why is `Buffer` unsafe? - -Today, the node.js `Buffer` constructor is overloaded to handle many different argument -types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), -`ArrayBuffer`, and also `Number`. - -The API is optimized for convenience: you can throw any type at it, and it will try to do -what you want. - -Because the Buffer constructor is so powerful, you often see code like this: - -```js -// Convert UTF-8 strings to hex -function toHex (str) { - return new Buffer(str).toString('hex') -} -``` - -***But what happens if `toHex` is called with a `Number` argument?*** - -### Remote Memory Disclosure - -If an attacker can make your program call the `Buffer` constructor with a `Number` -argument, then they can make it allocate uninitialized memory from the node.js process. -This could potentially disclose TLS private keys, user data, or database passwords. - -When the `Buffer` constructor is passed a `Number` argument, it returns an -**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like -this, you **MUST** overwrite the contents before returning it to the user. - -From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): - -> `new Buffer(size)` -> -> - `size` Number -> -> The underlying memory for `Buffer` instances created in this way is not initialized. -> **The contents of a newly created `Buffer` are unknown and could contain sensitive -> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. - -(Emphasis our own.) - -Whenever the programmer intended to create an uninitialized `Buffer` you often see code -like this: - -```js -var buf = new Buffer(16) - -// Immediately overwrite the uninitialized buffer with data from another buffer -for (var i = 0; i < buf.length; i++) { - buf[i] = otherBuf[i] -} -``` - - -### Would this ever be a problem in real code? - -Yes. It's surprisingly common to forget to check the type of your variables in a -dynamically-typed language like JavaScript. - -Usually the consequences of assuming the wrong type is that your program crashes with an -uncaught exception. But the failure mode for forgetting to check the type of arguments to -the `Buffer` constructor is more catastrophic. - -Here's an example of a vulnerable service that takes a JSON payload and converts it to -hex: - -```js -// Take a JSON payload {str: "some string"} and convert it to hex -var server = http.createServer(function (req, res) { - var data = '' - req.setEncoding('utf8') - req.on('data', function (chunk) { - data += chunk - }) - req.on('end', function () { - var body = JSON.parse(data) - res.end(new Buffer(body.str).toString('hex')) - }) -}) - -server.listen(8080) -``` - -In this example, an http client just has to send: - -```json -{ - "str": 1000 -} -``` - -and it will get back 1,000 bytes of uninitialized memory from the server. - -This is a very serious bug. It's similar in severity to the -[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process -memory by remote attackers. - - -### Which real-world packages were vulnerable? 
- -#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) - -[Mathias Buus](https://github.com/mafintosh) and I -([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, -[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow -anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get -them to reveal 20 bytes at a time of uninitialized memory from the node.js process. - -Here's -[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) -that fixed it. We released a new fixed version, created a -[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all -vulnerable versions on npm so users will get a warning to upgrade to a newer version. - -#### [`ws`](https://www.npmjs.com/package/ws) - -That got us wondering if there were other vulnerable packages. Sure enough, within a short -period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the -most popular WebSocket implementation in node.js. - -If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as -expected, then uninitialized server memory would be disclosed to the remote peer. - -These were the vulnerable methods: - -```js -socket.send(number) -socket.ping(number) -socket.pong(number) -``` - -Here's a vulnerable socket server with some echo functionality: - -```js -server.on('connection', function (socket) { - socket.on('message', function (message) { - message = JSON.parse(message) - if (message.type === 'echo') { - socket.send(message.data) // send back the user's message - } - }) -}) -``` - -`socket.send(number)` called on the server, will disclose server memory. - -Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue -was fixed, with a more detailed explanation. Props to -[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the -[Node Security Project disclosure](https://nodesecurity.io/advisories/67). - - -### What's the solution? - -It's important that node.js offers a fast way to get memory otherwise performance-critical -applications would needlessly get a lot slower. - -But we need a better way to *signal our intent* as programmers. **When we want -uninitialized memory, we should request it explicitly.** - -Sensitive functionality should not be packed into a developer-friendly API that loosely -accepts many different types. This type of API encourages the lazy practice of passing -variables in without checking the type very carefully. - -#### A new API: `Buffer.allocUnsafe(number)` - -The functionality of creating buffers with uninitialized memory should be part of another -API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that -frequently gets user input of all sorts of different types passed into it. - -```js -var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! - -// Immediately overwrite the uninitialized buffer with data from another buffer -for (var i = 0; i < buf.length; i++) { - buf[i] = otherBuf[i] -} -``` - - -### How do we fix node.js core? - -We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as -`semver-major`) which defends against one case: - -```js -var str = 16 -new Buffer(str, 'utf8') -``` - -In this situation, it's implied that the programmer intended the first argument to be a -string, since they passed an encoding as a second argument. 
Today, node.js will allocate -uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not -what the programmer intended. - -But this is only a partial solution, since if the programmer does `new Buffer(variable)` -(without an `encoding` parameter) there's no way to know what they intended. If `variable` -is sometimes a number, then uninitialized memory will sometimes be returned. - -### What's the real long-term fix? - -We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when -we need uninitialized memory. But that would break 1000s of packages. - -~~We believe the best solution is to:~~ - -~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ - -~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ - -#### Update - -We now support adding three new APIs: - -- `Buffer.from(value)` - convert from any type to a buffer -- `Buffer.alloc(size)` - create a zero-filled buffer -- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size - -This solves the core problem that affected `ws` and `bittorrent-dht` which is -`Buffer(variable)` getting tricked into taking a number argument. - -This way, existing code continues working and the impact on the npm ecosystem will be -minimal. Over time, npm maintainers can migrate performance-critical code to use -`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. - - -### Conclusion - -We think there's a serious design issue with the `Buffer` API as it exists today. It -promotes insecure software by putting high-risk functionality into a convenient API -with friendly "developer ergonomics". - -This wasn't merely a theoretical exercise because we found the issue in some of the -most popular npm packages. - -Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of -`buffer`. - -```js -var Buffer = require('safe-buffer').Buffer -``` - -Eventually, we hope that node.js core can switch to this new, safer behavior. We believe -the impact on the ecosystem would be minimal since it's not a breaking change. -Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while -older, insecure packages would magically become safe from this attack vector. - - -## links - -- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) -- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) -- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) - - -## credit - -The original issues in `bittorrent-dht` -([disclosure](https://nodesecurity.io/advisories/68)) and -`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by -[Mathias Buus](https://github.com/mafintosh) and -[Feross Aboukhadijeh](http://feross.org/). - -Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues -and for his work running the [Node Security Project](https://nodesecurity.io/). - -Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and -auditing the code. - - -## license - -MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/safe-buffer/index.d.ts b/node_modules/safe-buffer/index.d.ts deleted file mode 100644 index e9fed80..0000000 --- a/node_modules/safe-buffer/index.d.ts +++ /dev/null @@ -1,187 +0,0 @@ -declare module "safe-buffer" { - export class Buffer { - length: number - write(string: string, offset?: number, length?: number, encoding?: string): number; - toString(encoding?: string, start?: number, end?: number): string; - toJSON(): { type: 'Buffer', data: any[] }; - equals(otherBuffer: Buffer): boolean; - compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; - copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; - slice(start?: number, end?: number): Buffer; - writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; - readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; - readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; - readUInt8(offset: number, noAssert?: boolean): number; - readUInt16LE(offset: number, noAssert?: boolean): number; - readUInt16BE(offset: number, noAssert?: boolean): number; - readUInt32LE(offset: number, noAssert?: boolean): number; - readUInt32BE(offset: number, noAssert?: boolean): number; - readInt8(offset: number, noAssert?: boolean): number; - readInt16LE(offset: number, noAssert?: boolean): number; - readInt16BE(offset: number, noAssert?: boolean): number; - readInt32LE(offset: number, noAssert?: boolean): number; - readInt32BE(offset: number, noAssert?: boolean): number; - readFloatLE(offset: number, noAssert?: boolean): number; - readFloatBE(offset: number, noAssert?: boolean): number; - readDoubleLE(offset: number, noAssert?: boolean): number; - readDoubleBE(offset: number, noAssert?: boolean): number; - swap16(): Buffer; - swap32(): Buffer; - swap64(): Buffer; - writeUInt8(value: number, offset: number, noAssert?: boolean): number; - writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeInt8(value: number, offset: number, noAssert?: boolean): number; - writeInt16LE(value: number, offset: number, noAssert?: boolean): number; - writeInt16BE(value: number, offset: number, noAssert?: boolean): number; - writeInt32LE(value: number, offset: number, noAssert?: boolean): number; - writeInt32BE(value: number, offset: number, noAssert?: boolean): number; - writeFloatLE(value: number, offset: number, noAssert?: boolean): number; - writeFloatBE(value: number, offset: number, noAssert?: boolean): number; - writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; - writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; - fill(value: any, offset?: number, end?: number): this; - indexOf(value: string | 
number | Buffer, byteOffset?: number, encoding?: string): number; - lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; - includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; - - /** - * Allocates a new buffer containing the given {str}. - * - * @param str String to store in buffer. - * @param encoding encoding to use, optional. Default is 'utf8' - */ - constructor (str: string, encoding?: string); - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - */ - constructor (size: number); - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - */ - constructor (array: Uint8Array); - /** - * Produces a Buffer backed by the same allocated memory as - * the given {ArrayBuffer}. - * - * - * @param arrayBuffer The ArrayBuffer with which to share memory. - */ - constructor (arrayBuffer: ArrayBuffer); - /** - * Allocates a new buffer containing the given {array} of octets. - * - * @param array The octets to store. - */ - constructor (array: any[]); - /** - * Copies the passed {buffer} data onto a new {Buffer} instance. - * - * @param buffer The buffer to copy. - */ - constructor (buffer: Buffer); - prototype: Buffer; - /** - * Allocates a new Buffer using an {array} of octets. - * - * @param array - */ - static from(array: any[]): Buffer; - /** - * When passed a reference to the .buffer property of a TypedArray instance, - * the newly created Buffer will share the same allocated memory as the TypedArray. - * The optional {byteOffset} and {length} arguments specify a memory range - * within the {arrayBuffer} that will be shared by the Buffer. - * - * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() - * @param byteOffset - * @param length - */ - static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; - /** - * Copies the passed {buffer} data onto a new Buffer instance. - * - * @param buffer - */ - static from(buffer: Buffer): Buffer; - /** - * Creates a new Buffer containing the given JavaScript string {str}. - * If provided, the {encoding} parameter identifies the character encoding. - * If not provided, {encoding} defaults to 'utf8'. - * - * @param str - */ - static from(str: string, encoding?: string): Buffer; - /** - * Returns true if {obj} is a Buffer - * - * @param obj object to test. - */ - static isBuffer(obj: any): obj is Buffer; - /** - * Returns true if {encoding} is a valid encoding argument. - * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' - * - * @param encoding string to test. - */ - static isEncoding(encoding: string): boolean; - /** - * Gives the actual byte length of a string. encoding defaults to 'utf8'. - * This is not the same as String.prototype.length since that returns the number of characters in a string. - * - * @param string string to test. - * @param encoding encoding used to evaluate (defaults to 'utf8') - */ - static byteLength(string: string, encoding?: string): number; - /** - * Returns a buffer which is the result of concatenating all the buffers in the list together. - * - * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. - * If the list has exactly one item, then the first item of the list is returned. - * If the list has more than one item, then a new Buffer is created. 
- * - * @param list An array of Buffer objects to concatenate - * @param totalLength Total length of the buffers when concatenated. - * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. - */ - static concat(list: Buffer[], totalLength?: number): Buffer; - /** - * The same as buf1.compare(buf2). - */ - static compare(buf1: Buffer, buf2: Buffer): number; - /** - * Allocates a new buffer of {size} octets. - * - * @param size count of octets to allocate. - * @param fill if specified, buffer will be initialized by calling buf.fill(fill). - * If parameter is omitted, buffer will be filled with zeros. - * @param encoding encoding used for call to buf.fill while initalizing - */ - static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; - /** - * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. - * - * @param size count of octets to allocate - */ - static allocUnsafe(size: number): Buffer; - /** - * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents - * of the newly created Buffer are unknown and may contain sensitive data. - * - * @param size count of octets to allocate - */ - static allocUnsafeSlow(size: number): Buffer; - } -} \ No newline at end of file diff --git a/node_modules/safe-buffer/index.js b/node_modules/safe-buffer/index.js deleted file mode 100644 index 054c8d3..0000000 --- a/node_modules/safe-buffer/index.js +++ /dev/null @@ -1,64 +0,0 @@ -/* eslint-disable node/no-deprecated-api */ -var buffer = require('buffer') -var Buffer = buffer.Buffer - -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] - } -} -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} - -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} - -SafeBuffer.prototype = Object.create(Buffer.prototype) - -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) - -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') - } - return Buffer(arg, encodingOrOffset, length) -} - -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) - } - } else { - buf.fill(0) - } - return buf -} - -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return Buffer(size) -} - -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - return buffer.SlowBuffer(size) -} diff --git a/node_modules/safe-buffer/package.json b/node_modules/safe-buffer/package.json deleted file mode 100644 index d65117d..0000000 --- a/node_modules/safe-buffer/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "_from": "safe-buffer@~5.2.0", - "_id": 
"safe-buffer@5.2.0", - "_inBundle": false, - "_integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==", - "_location": "/safe-buffer", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "safe-buffer@~5.2.0", - "name": "safe-buffer", - "escapedName": "safe-buffer", - "rawSpec": "~5.2.0", - "saveSpec": null, - "fetchSpec": "~5.2.0" - }, - "_requiredBy": [ - "/browser-pack", - "/browserify-aes", - "/browserify-des", - "/cipher-base", - "/cpx2", - "/create-hmac", - "/evp_bytestokey", - "/got", - "/hash-base", - "/md5.js", - "/parse-asn1", - "/pbkdf2", - "/public-encrypt", - "/randombytes", - "/randomfill", - "/registry-auth-token", - "/request", - "/sha.js", - "/string_decoder", - "/tunnel-agent" - ], - "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz", - "_shasum": "b74daec49b1148f88c64b68d49b1e815c1f2f519", - "_spec": "safe-buffer@~5.2.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/string_decoder", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "http://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/safe-buffer/issues" - }, - "bundleDependencies": false, - "deprecated": false, - "description": "Safer Node.js Buffer API", - "devDependencies": { - "standard": "*", - "tape": "^4.0.0" - }, - "homepage": "https://github.com/feross/safe-buffer", - "keywords": [ - "buffer", - "buffer allocate", - "node security", - "safe", - "safe-buffer", - "security", - "uninitialized" - ], - "license": "MIT", - "main": "index.js", - "name": "safe-buffer", - "repository": { - "type": "git", - "url": "git://github.com/feross/safe-buffer.git" - }, - "scripts": { - "test": "standard && tape test/*.js" - }, - "types": "index.d.ts", - "version": "5.2.0" -} diff --git a/node_modules/stream-shift/.travis.yml b/node_modules/stream-shift/.travis.yml deleted file mode 100644 index ecd4193..0000000 --- a/node_modules/stream-shift/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: node_js -node_js: - - "0.10" - - "0.12" - - "4" - - "6" diff --git a/node_modules/stream-shift/LICENSE b/node_modules/stream-shift/LICENSE deleted file mode 100644 index bae9da7..0000000 --- a/node_modules/stream-shift/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Mathias Buus - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/stream-shift/README.md b/node_modules/stream-shift/README.md deleted file mode 100644 index d9cc2d9..0000000 --- a/node_modules/stream-shift/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# stream-shift - -Returns the next buffer/object in a stream's readable queue - -``` -npm install stream-shift -``` - -[![build status](http://img.shields.io/travis/mafintosh/stream-shift.svg?style=flat)](http://travis-ci.org/mafintosh/stream-shift) - -## Usage - -``` js -var shift = require('stream-shift') - -console.log(shift(someStream)) // first item in its buffer -``` - -## Credit - -Thanks [@dignifiedquire](https://github.com/dignifiedquire) for making this work on node 6 - -## License - -MIT diff --git a/node_modules/stream-shift/index.js b/node_modules/stream-shift/index.js deleted file mode 100644 index 33cc4d7..0000000 --- a/node_modules/stream-shift/index.js +++ /dev/null @@ -1,20 +0,0 @@ -module.exports = shift - -function shift (stream) { - var rs = stream._readableState - if (!rs) return null - return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) -} - -function getStateLength (state) { - if (state.buffer.length) { - // Since node 6.3.0 state.buffer is a BufferList not an array - if (state.buffer.head) { - return state.buffer.head.data.length - } - - return state.buffer[0].length - } - - return state.length -} diff --git a/node_modules/stream-shift/package.json b/node_modules/stream-shift/package.json deleted file mode 100644 index b13abd5..0000000 --- a/node_modules/stream-shift/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "_from": "stream-shift@^1.0.0", - "_id": "stream-shift@1.0.1", - "_inBundle": false, - "_integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==", - "_location": "/stream-shift", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "stream-shift@^1.0.0", - "name": "stream-shift", - "escapedName": "stream-shift", - "rawSpec": "^1.0.0", - "saveSpec": null, - "fetchSpec": "^1.0.0" - }, - "_requiredBy": [ - "/duplexify" - ], - "_resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", - "_shasum": "d7088281559ab2778424279b0877da3c392d5a3d", - "_spec": "stream-shift@^1.0.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/duplexify", - "author": { - "name": "Mathias Buus", - "url": "@mafintosh" - }, - "bugs": { - "url": "https://github.com/mafintosh/stream-shift/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "Returns the next buffer/object in a stream's readable queue", - "devDependencies": { - "standard": "^7.1.2", - "tape": "^4.6.0", - "through2": "^2.0.1" - }, - "homepage": "https://github.com/mafintosh/stream-shift", - "license": "MIT", - "main": "index.js", - "name": "stream-shift", - "repository": { - "type": "git", - "url": "git+https://github.com/mafintosh/stream-shift.git" - }, - "scripts": { - "test": "standard && tape test.js" - }, - "version": "1.0.1" -} diff --git a/node_modules/stream-shift/test.js b/node_modules/stream-shift/test.js deleted file mode 100644 index c0222c3..0000000 --- a/node_modules/stream-shift/test.js +++ /dev/null @@ -1,48 +0,0 @@ -var tape = require('tape') -var through = require('through2') -var stream = require('stream') -var shift = require('./') - -tape('shifts next', function (t) { - var passthrough = through() - - passthrough.write('hello') - 
passthrough.write('world') - - t.same(shift(passthrough), Buffer('hello')) - t.same(shift(passthrough), Buffer('world')) - t.end() -}) - -tape('shifts next with core', function (t) { - var passthrough = stream.PassThrough() - - passthrough.write('hello') - passthrough.write('world') - - t.same(shift(passthrough), Buffer('hello')) - t.same(shift(passthrough), Buffer('world')) - t.end() -}) - -tape('shifts next with object mode', function (t) { - var passthrough = through({objectMode: true}) - - passthrough.write({hello: 1}) - passthrough.write({world: 1}) - - t.same(shift(passthrough), {hello: 1}) - t.same(shift(passthrough), {world: 1}) - t.end() -}) - -tape('shifts next with object mode with core', function (t) { - var passthrough = stream.PassThrough({objectMode: true}) - - passthrough.write({hello: 1}) - passthrough.write({world: 1}) - - t.same(shift(passthrough), {hello: 1}) - t.same(shift(passthrough), {world: 1}) - t.end() -}) diff --git a/node_modules/streamx/.travis.yml b/node_modules/streamx/.travis.yml deleted file mode 100644 index 51180fc..0000000 --- a/node_modules/streamx/.travis.yml +++ /dev/null @@ -1,8 +0,0 @@ -language: node_js -node_js: - - '10' - - '12' -os: - - windows - - osx - - linux diff --git a/node_modules/streamx/LICENSE b/node_modules/streamx/LICENSE deleted file mode 100644 index c728dc7..0000000 --- a/node_modules/streamx/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Mathias Buus - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/streamx/README.md b/node_modules/streamx/README.md deleted file mode 100644 index d87eac6..0000000 --- a/node_modules/streamx/README.md +++ /dev/null @@ -1,366 +0,0 @@ -# streamx - -An iteration of the Node.js core streams with a series of improvements. - -``` -npm install streamx -``` - -[![Build Status](https://travis-ci.org/mafintosh/streamx.svg?branch=master)](https://travis-ci.org/mafintosh/streamx) - -## Main improvements from Node.js core stream - -#### Proper lifecycle support. - -Streams have an `_open` function that is called before any read/write operation and a `_destroy` -function that is always run as the last part of the stream. - -This makes it easy to maintain state. - -#### Easy error handling - -Fully integrates a `.destroy()` function. When called the stream will wait for any -pending operation to finish and call the stream destroy logic. - -Close is *always* the last event emitted and `destroy` is always ran. 
- -#### `pipe()` error handles - -`pipe` accepts a callback that is called when the pipeline is fully drained. -It also error handles the streams provided and destroys both streams if either -of them fail. - -#### All streams are both binary and object mode streams - -A `map` function can be provided to map your input data into buffers -or other formats. To indicate how much buffer space each data item takes -an `byteLength` function can be provided as well. - -This removes the need for two modes of streams. - -#### Simplicity - -This is a full rewrite, all contained in one file. - -Lots of stream methods are simplified based on how I and devs I work with actually use streams in the wild. - -#### Backwards compat - -streamx aims to be compatible with Node.js streams whenever it is reasonable to do so. - -This means that streamx streams behave a lot like Node.js streams from the outside but still provides the -improvements above. - -## Usage - -``` js -const { Readable } = require('streamx') - -const rs = new Readable({ - read (cb) { - this.push('Cool data') - cb(null) - } -}) - -rs.on('data', data => console.log('data:', data)) -``` - -## API - -This streamx package contains 4 streams similar to Node.js core. - -## Readable Stream - -#### `rs = new stream.Readable([options])` - -Create a new readable stream. - -Options include: - -``` -{ - highWaterMark: 16384, // max buffer size in bytes - map: (data) => data, // optional function to map input data - byteLength: (data) => size // optional function that calculates the byte size of input data -} -``` - -In addition you can pass the `open`, `read`, and `destroy` functions as shorthands in -the constructor instead of overwrite the methods below. - -The default byteLength function returns the byte length of buffers and `1024` -for any other object. This means the buffer will contain around 16 non buffers -or buffers worth 16kb when full if the defaults are used. - -#### `rs._read(cb)` - -This function is called when the stream wants you to push new data. -Overwrite this and add your own read logic. -You should call the callback when you are fully done with the read. - -Can also be set using `options.read` in the constructor. - -Note that this function differs from Node.js streams in that it takes -the "read finished" callback. - -#### `drained = rs.push(data)` - -Push new data to the stream. Returns true if the buffer is not full -and you should push more data if you can. - -If you call `rs.push(null)` you signal to the stream that no more -data will be pushed and that you want to end the stream. - -#### `data = rs.read()` - -Read a piece of data from the stream buffer. If the buffer is currently empty -`null` will be returned and you should wait for `readable` to be emitted before -trying again. If the stream has been ended it will also return `null`. - -Note that this method differs from Node.js streams in that it does not accept -an optional amounts of bytes to consume. - -#### `rs.unshift(data)` - -Add a piece of data to the front of the buffer. Use this if you read too much -data using the `rs.read()` function. - -#### `rs._open(cb)` - -This function is called once before the first read is issued. Use this function -to implement your own open logic. - -Can also be set using `options.open` in the constructor. - -#### `rs._destroy(cb)` - -This function is called just before the stream is fully destroyed. You should -use this to implement whatever teardown logic you need. 
The final part of the -stream life cycle is always to call destroy itself so this function will always -be called wheather or not the stream ends gracefully or forcefully. - -Can also be set using `options.destroy` in the constructor. - -Note that the `_destroy` might be called without the open function being called -in case no read was ever performed on the stream. - -#### `rs._predestroy()` - -A simple hook that is called as soon as the first `stream.destroy()` call is invoked. - -Use this in case you need to cancel pending reads (if possible) instead of waiting for them to finish. - -Can also be set using `options.predestroy` in the constructor. - -#### `rs.destroy([error])` - -Forcefully destroy the stream. Will call `_destroy` as soon as all pending reads have finished. -Once the stream is fully destroyed `close` will be emitted. - -If you pass an error this error will be emitted just before `close` is, signifying a reason -as to why this stream was destroyed. - -#### `rs.pause()` - -Pauses the stream. You will only need to call this if you want to pause a resumed stream. - -#### `rs.resume()` - -Will start reading data from the stream as fast as possible. - -If you do not call this, you need to use the `read()` method to read data or the `pipe()` method to -pipe the stream somewhere else or the `data` handler. - -If none of these option are used the stream will stay paused. - -#### `writableStream = rs.pipe(writableStream, [callback])` - -Efficently pipe the readable stream to a writable stream (can be Node.js core stream or a stream from this package). -If you provide a callback the callback is called when the pipeline has fully finished with an optional error in case -it failed. - -To cancel the pipeline destroy either of the streams. - -#### `rs.on('readable')` - -Emitted when data is pushed to the stream if the buffer was previously empty. - -#### `rs.on('data', data)` - -Emitted when data is being read from the stream. If you attach a data handler you are implicitly resuming the stream. - -#### `rs.on('end')` - -Emitted when the readable stream has ended and no data is left in it's buffer. - -#### `rs.on('close')` - -Emitted when the readable stream has fully closed (i.e. it's destroy function has completed) - -#### `rs.on('error', err)` - -Emitted if any of the stream operations fail with an error. `close` is always emitted right after this. - -#### `rs.destroyed` - -Boolean property indicating wheather or not this stream has been destroyed. - -#### `bool = Readable.isBackpressured(rs)` - -Static method to check if a readable stream is currently under backpressure. - -### `stream = Readable.from(arrayOrBufferOrStringOrAsyncIterator) - -Static method to turn an array or buffer or string or AsyncIterator into a readable stream. - -## Writable Stream - -#### `ws = new stream.Writable([options])` - -Create a new writable stream. - -Options include: - -``` -{ - highWaterMark: 16384, // max buffer size in bytes - map: (data) => data, // optional function to map input data - byteLength: (data) => size // optional function that calculates the byte size of input data -} -``` - -In addition you can pass the `open`, `write`, `flush`, and `destroy` functions as shorthands in -the constructor instead of overwrite the methods below. - -The default byteLength function returns the byte length of buffers and `1024` -for any other object. This means the buffer will contain around 16 non buffers -or buffers worth 16kb when full if the defaults are used. 
- -#### `ws._open(cb)` - -This function is called once before the first write is issued. Use this function -to implement your own open logic. - -Can also be set using `options.open` in the constructor. - -#### `ws._destroy(cb)` - -This function is called just before the stream is fully destroyed. You should -use this to implement whatever teardown logic you need. The final part of the -stream life cycle is always to call destroy itself so this function will always -be called wheather or not the stream ends gracefully or forcefully. - -Can also be set using `options.destroy` in the constructor. - -Note that the `_destroy` might be called without the open function being called -in case no write was ever performed on the stream. - -#### `ws._predestroy()` - -A simple hook that is called as soon as the first `stream.destroy()` call is invoked. - -Use this in case you need to cancel pending writes (if possible) instead of waiting for them to finish. - -Can also be set using `options.predestroy` in the constructor. - -#### `ws.destroy([error])` - -Forcefully destroy the stream. Will call `_destroy` as soon as all pending reads have finished. -Once the stream is fully destroyed `close` will be emitted. - -If you pass an error this error will be emitted just before `close` is, signifying a reason -as to why this stream was destroyed. - -#### `drained = ws.write(data)` - -Write a piece of data to the stream. Returns `true` if the stream buffer is not full and you -should keep writing to it if you can. If `false` is returned the stream will emit `drain` -once it's buffer is fully drained. - -#### `ws._write(data, callback)` - -This function is called when the stream want to write some data. Use this to implement your own -write logic. When done call the callback and the stream will call it again if more data exists in the buffer. - -Can also be set using `options.write` in the constructor. - -#### `ws._writev(batch, callback)` - -Similar to `_write` but passes an array of all data in the current write buffer instead of the oldest one. -Useful if the destination you are writing the data to supports batching. - -Can also be set using `options.writev` in the constructor. - -#### `ws.end()` - -Gracefully end the writable stream. Call this when you no longer want to write to the stream. - -Once all writes have been fully drained `finish` will be emitted. - -#### `ws._final(callback)` - -This function is called just before `finish` is emitted, i.e. when all writes have flushed but `ws.end()` -have been called. Use this to implement any logic that should happen after all writes but before finish. - -Can also be set using `options.final` in the constructor. - -#### `ws.on('finish')` - -Emitted when the stream has been ended and all writes have been drained. - -#### `ws.on('close')` - -Emitted when the readable stream has fully closed (i.e. it's destroy function has completed) - -#### `ws.on('error', err)` - -Emitted if any of the stream operations fail with an error. `close` is always emitted right after this. - -#### `ws.destroyed` - -Boolean property indicating wheather or not this stream has been destroyed. - -#### `bool = Writable.isBackpressured(ws)` - -Static method to check if a writable stream is currently under backpressure. - -## Duplex Stream - -#### `s = new stream.Duplex([options])` - -A duplex stream is a stream that is both readable and writable. - -Since JS does not support multiple inheritance it inherits directly from Readable -but implements the Writable API as well. 
- -If you want to provide only a map function for the readable side use `mapReadable` instead. -If you want to provide only a byteLength function for the readable side use `byteLengthReadable` instead. - -Same goes for the writable side but using `mapWritable` and `byteLengthWritable` instead. - -## Transform Stream - -#### `ts = new stream.Transform([options])` - -A transform stream is a duplex stream that maps the data written to it and emits that as readable data. - -Has the same options as a duplex stream except you can provide a `transform` function also. - -#### `ts._transform(data, callback)` - -Transform the incoming data. Call `callback(null, mappedData)` or use `ts.push(mappedData)` to -return data to the readable side of the stream. - -Per default the transform function just remits the incoming data making it act as a pass-through stream. - -## Contributing - -If you want to help contribute to streamx a good way to start is to help writing more test -cases, compatibility tests, documentation, or performance benchmarks. - -If in doubt open an issue :) - -## License - -MIT diff --git a/node_modules/streamx/examples/fs.js b/node_modules/streamx/examples/fs.js deleted file mode 100644 index f5f383a..0000000 --- a/node_modules/streamx/examples/fs.js +++ /dev/null @@ -1,73 +0,0 @@ -const fs = require('fs') -const { Writable, Readable } = require('../') - -class FileWriteStream extends Writable { - constructor (filename, mode) { - super() - this.filename = filename - this.mode = mode - this.fd = 0 - } - - _open (cb) { - fs.open(this.filename, this.mode, (err, fd) => { - if (err) return cb(err) - this.fd = fd - cb(null) - }) - } - - _write (data, cb) { - fs.write(this.fd, data, 0, data.length, null, (err, written) => { - if (err) return cb(err) - if (written !== data.length) return this._write(data.slice(written), cb) - cb(null) - }) - } - - _destroy (cb) { - if (!this.fd) return cb() - fs.close(this.fd, cb) - } -} - -class FileReadStream extends Readable { - constructor (filename) { - super() - this.filename = filename - this.fd = 0 - } - - _open (cb) { - fs.open(this.filename, 'r', (err, fd) => { - if (err) return cb(err) - this.fd = fd - cb(null) - }) - } - - _read (cb) { - let data = Buffer.alloc(16 * 1024) - - fs.read(this.fd, data, 0, data.length, null, (err, read) => { - if (err) return cb(err) - if (read !== data.length) data = data.slice(0, read) - this.push(data.length ? 
data : null) - cb(null) - }) - } - - _destroy (cb) { - if (!this.fd) return cb() - fs.close(this.fd, cb) - } -} - -// copy this file as an example - -const rs = new FileReadStream(__filename) -const ws = new FileWriteStream(`${__filename}.cpy`, 'w') - -rs.pipe(ws, function (err) { - console.log('file copied', err) -}) diff --git a/node_modules/streamx/index.js b/node_modules/streamx/index.js deleted file mode 100644 index 554630d..0000000 --- a/node_modules/streamx/index.js +++ /dev/null @@ -1,855 +0,0 @@ -const { EventEmitter } = require('events') -const STREAM_DESTROYED = new Error('Stream was destroyed') - -const FIFO = require('fast-fifo') - -/* eslint-disable no-multi-spaces */ - -const MAX = ((1 << 25) - 1) - -// Shared state -const OPENING = 0b001 -const DESTROYING = 0b010 -const DESTROYED = 0b100 - -const NOT_OPENING = MAX ^ OPENING - -// Read state -const READ_ACTIVE = 0b0000000000001 << 3 -const READ_PRIMARY = 0b0000000000010 << 3 -const READ_SYNC = 0b0000000000100 << 3 -const READ_QUEUED = 0b0000000001000 << 3 -const READ_RESUMED = 0b0000000010000 << 3 -const READ_PIPE_DRAINED = 0b0000000100000 << 3 -const READ_ENDING = 0b0000001000000 << 3 -const READ_EMIT_DATA = 0b0000010000000 << 3 -const READ_EMIT_READABLE = 0b0000100000000 << 3 -const READ_EMITTED_READABLE = 0b0001000000000 << 3 -const READ_DONE = 0b0010000000000 << 3 -const READ_NEXT_TICK = 0b0100000000001 << 3 // also active -const READ_NEEDS_PUSH = 0b1000000000000 << 3 - -const READ_NOT_ACTIVE = MAX ^ READ_ACTIVE -const READ_NON_PRIMARY = MAX ^ READ_PRIMARY -const READ_NON_PRIMARY_AND_PUSHED = MAX ^ (READ_PRIMARY | READ_NEEDS_PUSH) -const READ_NOT_SYNC = MAX ^ READ_SYNC -const READ_PUSHED = MAX ^ READ_NEEDS_PUSH -const READ_PAUSED = MAX ^ READ_RESUMED -const READ_NOT_QUEUED = MAX ^ (READ_QUEUED | READ_EMITTED_READABLE) -const READ_NOT_ENDING = MAX ^ READ_ENDING -const READ_PIPE_NOT_DRAINED = MAX ^ (READ_RESUMED | READ_PIPE_DRAINED) -const READ_NOT_NEXT_TICK = MAX ^ READ_NEXT_TICK - -// Write state -const WRITE_ACTIVE = 0b000000001 << 16 -const WRITE_PRIMARY = 0b000000010 << 16 -const WRITE_SYNC = 0b000000100 << 16 -const WRITE_QUEUED = 0b000001000 << 16 -const WRITE_UNDRAINED = 0b000010000 << 16 -const WRITE_DONE = 0b000100000 << 16 -const WRITE_EMIT_DRAIN = 0b001000000 << 16 -const WRITE_NEXT_TICK = 0b010000001 << 16 // also active -const WRITE_FINISHING = 0b100000000 << 16 - -const WRITE_NOT_ACTIVE = MAX ^ WRITE_ACTIVE -const WRITE_NOT_SYNC = MAX ^ WRITE_SYNC -const WRITE_NON_PRIMARY = MAX ^ WRITE_PRIMARY -const WRITE_NOT_FINISHING = MAX ^ WRITE_FINISHING -const WRITE_DRAINED = MAX ^ WRITE_UNDRAINED -const WRITE_NOT_QUEUED = MAX ^ WRITE_QUEUED -const WRITE_NOT_NEXT_TICK = MAX ^ WRITE_NEXT_TICK - -// Combined shared state -const ACTIVE = READ_ACTIVE | WRITE_ACTIVE -const NOT_ACTIVE = MAX ^ ACTIVE -const DONE = READ_DONE | WRITE_DONE -const DESTROY_STATUS = DESTROYING | DESTROYED -const OPEN_STATUS = DESTROY_STATUS | OPENING -const AUTO_DESTROY = DESTROY_STATUS | DONE -const NON_PRIMARY = WRITE_NON_PRIMARY & READ_NON_PRIMARY - -// Combined read state -const READ_PRIMARY_STATUS = OPEN_STATUS | READ_ENDING | READ_DONE -const READ_STATUS = OPEN_STATUS | READ_DONE | READ_QUEUED -const READ_FLOWING = READ_RESUMED | READ_PIPE_DRAINED -const READ_ACTIVE_AND_SYNC = READ_ACTIVE | READ_SYNC -const READ_ACTIVE_AND_SYNC_AND_NEEDS_PUSH = READ_ACTIVE | READ_SYNC | READ_NEEDS_PUSH -const READ_PRIMARY_AND_ACTIVE = READ_PRIMARY | READ_ACTIVE -const READ_ENDING_STATUS = OPEN_STATUS | READ_ENDING | READ_QUEUED -const 
READ_EMIT_READABLE_AND_QUEUED = READ_EMIT_READABLE | READ_QUEUED -const READ_READABLE_STATUS = OPEN_STATUS | READ_EMIT_READABLE | READ_QUEUED | READ_EMITTED_READABLE -const SHOULD_NOT_READ = OPEN_STATUS | READ_ACTIVE | READ_ENDING | READ_DONE | READ_NEEDS_PUSH -const READ_BACKPRESSURE_STATUS = DESTROY_STATUS | READ_ENDING | READ_DONE - -// Combined write state -const WRITE_PRIMARY_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_DONE -const WRITE_QUEUED_AND_UNDRAINED = WRITE_QUEUED | WRITE_UNDRAINED -const WRITE_QUEUED_AND_ACTIVE = WRITE_QUEUED | WRITE_ACTIVE -const WRITE_DRAIN_STATUS = WRITE_QUEUED | WRITE_UNDRAINED | OPEN_STATUS | WRITE_ACTIVE -const WRITE_STATUS = OPEN_STATUS | WRITE_ACTIVE | WRITE_QUEUED -const WRITE_PRIMARY_AND_ACTIVE = WRITE_PRIMARY | WRITE_ACTIVE -const WRITE_ACTIVE_AND_SYNC = WRITE_ACTIVE | WRITE_SYNC -const WRITE_FINISHING_STATUS = OPEN_STATUS | WRITE_FINISHING | WRITE_QUEUED -const WRITE_BACKPRESSURE_STATUS = WRITE_UNDRAINED | DESTROY_STATUS | WRITE_FINISHING | WRITE_DONE - -const asyncIterator = Symbol.asyncIterator || Symbol('asyncIterator') - -class WritableState { - constructor (stream, { highWaterMark = 16384, map = null, mapWritable, byteLength, byteLengthWritable } = {}) { - this.stream = stream - this.queue = new FIFO() - this.highWaterMark = highWaterMark - this.buffered = 0 - this.error = null - this.pipeline = null - this.byteLength = byteLengthWritable || byteLength || defaultByteLength - this.map = mapWritable || map - this.afterWrite = afterWrite.bind(this) - } - - push (data) { - if (this.map !== null) data = this.map(data) - - this.buffered += this.byteLength(data) - this.queue.push(data) - - if (this.buffered < this.highWaterMark) { - this.stream._duplexState |= WRITE_QUEUED - return true - } - - this.stream._duplexState |= WRITE_QUEUED_AND_UNDRAINED - return false - } - - shift () { - const data = this.queue.shift() - const stream = this.stream - - this.buffered -= this.byteLength(data) - if (this.buffered === 0) stream._duplexState &= WRITE_NOT_QUEUED - - return data - } - - end (data) { - if (data !== undefined && data !== null) this.push(data) - this.stream._duplexState = (this.stream._duplexState | WRITE_FINISHING) & WRITE_NON_PRIMARY - } - - autoBatch (data, cb) { - const buffer = [] - const stream = this.stream - - buffer.push(data) - while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED_AND_ACTIVE) { - buffer.push(stream._writableState.shift()) - } - - if ((stream._duplexState & OPEN_STATUS) !== 0) return cb(null) - stream._writev(buffer, cb) - } - - update () { - const stream = this.stream - - while ((stream._duplexState & WRITE_STATUS) === WRITE_QUEUED) { - const data = this.shift() - stream._duplexState |= WRITE_ACTIVE_AND_SYNC - stream._write(data, this.afterWrite) - stream._duplexState &= WRITE_NOT_SYNC - } - - if ((stream._duplexState & WRITE_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary() - } - - updateNonPrimary () { - const stream = this.stream - - if ((stream._duplexState & WRITE_FINISHING_STATUS) === WRITE_FINISHING) { - stream._duplexState = (stream._duplexState | WRITE_ACTIVE) & WRITE_NOT_FINISHING - stream._final(afterFinal.bind(this)) - return - } - - if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) { - if ((stream._duplexState & ACTIVE) === 0) { - stream._duplexState |= ACTIVE - stream._destroy(afterDestroy.bind(this)) - } - return - } - - if ((stream._duplexState & OPEN_STATUS) === OPENING) { - stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING - stream._open(afterOpen.bind(this)) - } - } 
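A minimal sketch of the backpressure contract that `WritableState.push()` above implements, mirroring the `drain` tests later in this patch: `write()` returns `false` once the buffered bytes reach `highWaterMark`, and `'drain'` fires after the queue empties.

```js
const { Writable } = require('streamx')

const ws = new Writable({
  highWaterMark: 1,
  write (data, cb) {
    // acknowledge the chunk asynchronously, as a real sink would
    setImmediate(() => cb(null))
  }
})

console.log(ws.write('a')) // false – buffered >= highWaterMark, caller should back off
ws.on('drain', function () {
  console.log('queue drained, safe to write again')
})
```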
- - updateNextTick () { - if ((this.stream._duplexState & WRITE_NEXT_TICK) !== 0) return - this.stream._duplexState |= WRITE_NEXT_TICK - process.nextTick(updateWriteNT, this) - } -} - -class ReadableState { - constructor (stream, { highWaterMark = 16384, map = null, mapReadable, byteLength, byteLengthReadable } = {}) { - this.stream = stream - this.queue = new FIFO() - this.highWaterMark = highWaterMark - this.buffered = 0 - this.error = null - this.pipeline = null - this.byteLength = byteLengthReadable || byteLength || defaultByteLength - this.map = mapReadable || map - this.pipeTo = null - this.afterRead = afterRead.bind(this) - } - - pipe (pipeTo, cb) { - if (this.pipeTo !== null) throw new Error('Can only pipe to one destination') - - this.stream._duplexState |= READ_PIPE_DRAINED - this.pipeTo = pipeTo - this.pipeline = new Pipeline(this.stream, pipeTo, cb || null) - - if (cb) this.stream.on('error', noop) // We already error handle this so supress crashes - - if (isStreamx(pipeTo)) { - pipeTo._writableState.pipeline = this.pipeline - if (cb) pipeTo.on('error', noop) // We already error handle this so supress crashes - pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline)) // TODO: just call finished from pipeTo itself - } else { - const onerror = this.pipeline.done.bind(this.pipeline, pipeTo) - const onclose = this.pipeline.done.bind(this.pipeline, pipeTo, null) // onclose has a weird bool arg - pipeTo.on('error', onerror) - pipeTo.on('close', onclose) - pipeTo.on('finish', this.pipeline.finished.bind(this.pipeline)) - } - - pipeTo.on('drain', afterDrain.bind(this)) - this.stream.emit('pipe', pipeTo) - } - - push (data) { - const stream = this.stream - - if (data === null) { - this.highWaterMark = 0 - stream._duplexState = (stream._duplexState | READ_ENDING) & READ_NON_PRIMARY_AND_PUSHED - return false - } - - if (this.map !== null) data = this.map(data) - this.buffered += this.byteLength(data) - this.queue.push(data) - - stream._duplexState = (stream._duplexState | READ_QUEUED) & READ_PUSHED - - return this.buffered < this.highWaterMark - } - - shift () { - const data = this.queue.shift() - - this.buffered -= this.byteLength(data) - if (this.buffered === 0) this.stream._duplexState &= READ_NOT_QUEUED - return data - } - - unshift (data) { - let tail - const pending = [] - - while ((tail === this.queue.shift()) !== undefined) { - pending.push(tail) - } - - this.push(data) - - for (let i = 0; i < pending.length; i++) { - this.queue.push(pending[i]) - } - } - - read () { - const stream = this.stream - - if ((stream._duplexState & READ_STATUS) === READ_QUEUED) { - const data = this.shift() - if ((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data) - if (this.pipeTo !== null && this.pipeTo.write(data) === false) stream._duplexState &= READ_PIPE_NOT_DRAINED - return data - } - - return null - } - - drain () { - const stream = this.stream - - while ((stream._duplexState & READ_STATUS) === READ_QUEUED && (stream._duplexState & READ_FLOWING) !== 0) { - const data = this.shift() - if ((stream._duplexState & READ_EMIT_DATA) !== 0) stream.emit('data', data) - if (this.pipeTo !== null && this.pipeTo.write(data) === false) stream._duplexState &= READ_PIPE_NOT_DRAINED - } - } - - update () { - const stream = this.stream - - this.drain() - - while (this.buffered < this.highWaterMark && (stream._duplexState & SHOULD_NOT_READ) === 0) { - stream._duplexState |= READ_ACTIVE_AND_SYNC_AND_NEEDS_PUSH - stream._read(this.afterRead) - stream._duplexState &= READ_NOT_SYNC - if 
((stream._duplexState & READ_ACTIVE) === 0) this.drain() - } - - if ((stream._duplexState & READ_READABLE_STATUS) === READ_EMIT_READABLE_AND_QUEUED) { - stream._duplexState |= READ_EMITTED_READABLE - stream.emit('readable') - } - - if ((stream._duplexState & READ_PRIMARY_AND_ACTIVE) === 0) this.updateNonPrimary() - } - - updateNonPrimary () { - const stream = this.stream - - if ((stream._duplexState & READ_ENDING_STATUS) === READ_ENDING) { - stream._duplexState = (stream._duplexState | READ_DONE) & READ_NOT_ENDING - stream.emit('end') - if ((stream._duplexState & AUTO_DESTROY) === DONE) stream._duplexState |= DESTROYING - if (this.pipeTo !== null) this.pipeTo.end() - } - - if ((stream._duplexState & DESTROY_STATUS) === DESTROYING) { - if ((stream._duplexState & ACTIVE) === 0) { - stream._duplexState |= ACTIVE - stream._destroy(afterDestroy.bind(this)) - } - return - } - - if ((stream._duplexState & OPEN_STATUS) === OPENING) { - stream._duplexState = (stream._duplexState | ACTIVE) & NOT_OPENING - stream._open(afterOpen.bind(this)) - } - } - - updateNextTick () { - if ((this.stream._duplexState & READ_NEXT_TICK) !== 0) return - this.stream._duplexState |= READ_NEXT_TICK - process.nextTick(updateReadNT, this) - } -} - -class TransformState { - constructor (stream) { - this.data = null - this.afterTransform = afterTransform.bind(stream) - this.afterFinal = null - } -} - -class Pipeline { - constructor (src, dst, cb) { - this.from = src - this.to = dst - this.afterPipe = cb - this.error = null - this.pipeToFinished = false - } - - finished () { - this.pipeToFinished = true - } - - done (stream, err) { - if (err) this.error = err - - if (stream === this.to) { - this.to = null - - if (this.from !== null) { - if ((this.from._duplexState & READ_DONE) === 0 || !this.pipeToFinished) { - this.from.destroy(new Error('Writable stream closed prematurely')) - } - return - } - } - - if (stream === this.from) { - this.from = null - - if (this.to !== null) { - if ((stream._duplexState & READ_DONE) === 0) { - this.to.destroy(new Error('Readable stream closed before ending')) - } - return - } - } - - if (this.afterPipe !== null) this.afterPipe(this.error) - this.to = this.from = this.afterPipe = null - } -} - -function afterDrain () { - this.stream._duplexState |= READ_PIPE_DRAINED - if ((this.stream._duplexState & READ_ACTIVE_AND_SYNC) === 0) this.updateNextTick() -} - -function afterFinal (err) { - const stream = this.stream - if (err) stream.destroy(err) - if ((stream._duplexState & DESTROY_STATUS) === 0) { - stream._duplexState |= WRITE_DONE - stream.emit('finish') - } - if ((stream._duplexState & AUTO_DESTROY) === DONE) { - stream._duplexState |= DESTROYING - } - - stream._duplexState &= WRITE_NOT_ACTIVE - this.update() -} - -function afterDestroy (err) { - const stream = this.stream - - if (!err && this.error !== STREAM_DESTROYED) err = this.error - if (err) stream.emit('error', err) - stream._duplexState |= DESTROYED - stream.emit('close') - - const rs = stream._readableState - const ws = stream._writableState - - if (rs !== null && rs.pipeline !== null) rs.pipeline.done(stream, err) - if (ws !== null && ws.pipeline !== null) ws.pipeline.done(stream, err) -} - -function afterWrite (err) { - const stream = this.stream - - if (err) stream.destroy(err) - stream._duplexState &= WRITE_NOT_ACTIVE - - if ((stream._duplexState & WRITE_DRAIN_STATUS) === WRITE_UNDRAINED) { - stream._duplexState &= WRITE_DRAINED - if ((stream._duplexState & WRITE_EMIT_DRAIN) === WRITE_EMIT_DRAIN) { - stream.emit('drain') - } - } - 
- if ((stream._duplexState & WRITE_SYNC) === 0) this.update() -} - -function afterRead (err) { - if (err) this.stream.destroy(err) - this.stream._duplexState &= READ_NOT_ACTIVE - if ((this.stream._duplexState & READ_SYNC) === 0) this.update() -} - -function updateReadNT (rs) { - rs.stream._duplexState &= READ_NOT_NEXT_TICK - rs.update() -} - -function updateWriteNT (ws) { - ws.stream._duplexState &= WRITE_NOT_NEXT_TICK - ws.update() -} - -function afterOpen (err) { - const stream = this.stream - - if (err) stream.destroy(err) - - if ((stream._duplexState & DESTROYING) === 0) { - if ((stream._duplexState & READ_PRIMARY_STATUS) === 0) stream._duplexState |= READ_PRIMARY - if ((stream._duplexState & WRITE_PRIMARY_STATUS) === 0) stream._duplexState |= WRITE_PRIMARY - stream.emit('open') - } - - stream._duplexState &= NOT_ACTIVE - - if (stream._writableState !== null) { - stream._writableState.update() - } - - if (stream._readableState !== null) { - stream._readableState.update() - } -} - -function afterTransform (err, data) { - if (data !== undefined && data !== null) this.push(data) - this._writableState.afterWrite(err) -} - -class Stream extends EventEmitter { - constructor (opts) { - super() - - this._duplexState = 0 - this._readableState = null - this._writableState = null - - if (opts) { - if (opts.open) this._open = opts.open - if (opts.destroy) this._destroy = opts.destroy - if (opts.predestroy) this._predestroy = opts.predestroy - } - } - - _open (cb) { - cb(null) - } - - _destroy (cb) { - cb(null) - } - - _predestroy () { - // does nothing - } - - get readable () { - return this._readableState !== null - } - - get writable () { - return this._writableState !== null - } - - get destroyed () { - return (this._duplexState & DESTROYED) !== 0 - } - - get destroying () { - return (this._duplexState & DESTROY_STATUS) !== 0 - } - - destroy (err) { - if ((this._duplexState & DESTROY_STATUS) === 0) { - if (!err) err = STREAM_DESTROYED - this._duplexState = (this._duplexState | DESTROYING) & NON_PRIMARY - this._predestroy() - if (this._readableState !== null) { - this._readableState.error = err - this._readableState.updateNextTick() - } - if (this._writableState !== null) { - this._writableState.error = err - this._writableState.updateNextTick() - } - } - } - - on (name, fn) { - if (this._readableState !== null) { - if (name === 'data') { - this._duplexState |= (READ_EMIT_DATA | READ_RESUMED) - this._readableState.updateNextTick() - } - if (name === 'readable') { - this._duplexState |= READ_EMIT_READABLE - this._readableState.updateNextTick() - } - } - - if (this._writableState !== null) { - if (name === 'drain') { - this._duplexState |= WRITE_EMIT_DRAIN - this._writableState.updateNextTick() - } - } - - return super.on(name, fn) - } -} - -class Readable extends Stream { - constructor (opts) { - super(opts) - - this._duplexState |= OPENING | WRITE_DONE - this._readableState = new ReadableState(this, opts) - - if (opts) { - if (opts.read) this._read = opts.read - } - } - - _read (cb) { - cb(null) - } - - pipe (dest, cb) { - this._readableState.pipe(dest, cb) - this._readableState.updateNextTick() - return dest - } - - read () { - this._readableState.updateNextTick() - return this._readableState.read() - } - - push (data) { - this._readableState.updateNextTick() - return this._readableState.push(data) - } - - unshift (data) { - this._readableState.updateNextTick() - return this._readableState.unshift(data) - } - - resume () { - this._duplexState |= READ_RESUMED - 
this._readableState.updateNextTick() - } - - pause () { - this._duplexState &= READ_PAUSED - } - - static _fromAsyncIterator (ite) { - let destroy - - const rs = new Readable({ - read (cb) { - ite.next().then(push).then(cb.bind(null, null)).catch(cb) - }, - predestroy () { - destroy = ite.return() - }, - destroy (cb) { - destroy.then(cb.bind(null, null)).catch(cb) - } - }) - - return rs - - function push (data) { - if (data.done) rs.push(null) - else rs.push(data.value) - } - } - - static from (data) { - if (data[asyncIterator]) return this._fromAsyncIterator(data[asyncIterator]()) - if (!Array.isArray(data)) data = data === undefined ? [] : [data] - - let i = 0 - return new Readable({ - read (cb) { - this.push(i === data.length ? null : data[i++]) - cb(null) - } - }) - } - - static isBackpressured (rs) { - return (rs._duplexState & READ_BACKPRESSURE_STATUS) !== 0 || rs._readableState.buffered >= rs._readableState.highWaterMark - } - - [asyncIterator] () { - const stream = this - - let error = null - let ended = false - let promiseResolve - let promiseReject - - this.on('error', (err) => { error = err }) - this.on('end', () => { ended = true }) - this.on('close', () => call(error, null)) - this.on('readable', () => call(null, stream.read())) - - return { - [asyncIterator] () { - return this - }, - next () { - return new Promise(function (resolve, reject) { - promiseResolve = resolve - promiseReject = reject - const data = stream.read() - if (data !== null) call(null, data) - }) - }, - return () { - stream.destroy() - } - } - - function call (err, data) { - if (promiseReject === null) return - if (err) promiseReject(err) - else if (data === null && !ended) promiseReject(STREAM_DESTROYED) - else promiseResolve({ value: data, done: data === null }) - promiseReject = promiseResolve = null - } - } -} - -class Writable extends Stream { - constructor (opts) { - super(opts) - - this._duplexState |= OPENING | READ_DONE - this._writableState = new WritableState(this, opts) - - if (opts) { - if (opts.writev) this._writev = opts.writev - if (opts.write) this._write = opts.write - if (opts.final) this._final = opts.final - } - } - - _writev (batch, cb) { - cb(null) - } - - _write (data, cb) { - this._writableState.autoBatch(data, cb) - } - - _final (cb) { - cb(null) - } - - static isBackpressured (ws) { - return (ws._duplexState & WRITE_BACKPRESSURE_STATUS) !== 0 - } - - write (data) { - this._writableState.updateNextTick() - return this._writableState.push(data) - } - - end (data) { - this._writableState.updateNextTick() - this._writableState.end(data) - } -} - -class Duplex extends Readable { // and Writable - constructor (opts) { - super(opts) - - this._duplexState = OPENING - this._writableState = new WritableState(this, opts) - - if (opts) { - if (opts.writev) this._writev = opts.writev - if (opts.write) this._write = opts.write - if (opts.final) this._final = opts.final - } - } - - _writev (batch, cb) { - cb(null) - } - - _write (data, cb) { - this._writableState.autoBatch(data, cb) - } - - _final (cb) { - cb(null) - } - - write (data) { - this._writableState.updateNextTick() - return this._writableState.push(data) - } - - end (data) { - this._writableState.updateNextTick() - this._writableState.end(data) - } -} - -class Transform extends Duplex { - constructor (opts) { - super(opts) - this._transformState = new TransformState(this) - - if (opts) { - if (opts.transform) this._transform = opts.transform - if (opts.flush) this._flush = opts.flush - } - } - - _write (data, cb) { - if 
(this._readableState.buffered >= this._readableState.highWaterMark) { - this._transformState.data = data - } else { - this._transform(data, this._transformState.afterTransform) - } - } - - _read (cb) { - if (this._transformState.data !== null) { - const data = this._transformState.data - this._transformState.data = null - cb(null) - this._transform(data, this._transformState.afterTransform) - } else { - cb(null) - } - } - - _transform (data, cb) { - cb(null, data) - } - - _flush (cb) { - cb(null) - } - - _final (cb) { - this._transformState.afterFinal = cb - this._flush(transformAfterFlush.bind(this)) - } -} - -function transformAfterFlush (err, data) { - const cb = this._transformState.afterFinal - if (err) return cb(err) - if (data !== null && data !== undefined) this.push(data) - this.push(null) - cb(null) -} - -function isStream (stream) { - return !!stream._readableState || !!stream._writableState -} - -function isStreamx (stream) { - return typeof stream._duplexState === 'number' && isStream(stream) -} - -function defaultByteLength (data) { - return Buffer.isBuffer(data) ? data.length : 1024 -} - -function noop () {} - -module.exports = { - isStream, - isStreamx, - Stream, - Writable, - Readable, - Duplex, - Transform -} diff --git a/node_modules/streamx/package.json b/node_modules/streamx/package.json deleted file mode 100644 index d73242e..0000000 --- a/node_modules/streamx/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "_from": "streamx@^2.6.0", - "_id": "streamx@2.6.0", - "_inBundle": false, - "_integrity": "sha512-fDIdCkmCjhNt9/IIFL6UdJeqyOpDoX9rHXdRt5hpYQQ1owCSyVieQtTkbXcZAsASS9F5JrlxwKLHkv5w2GHVvA==", - "_location": "/streamx", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "streamx@^2.6.0", - "name": "streamx", - "escapedName": "streamx", - "rawSpec": "^2.6.0", - "saveSpec": null, - "fetchSpec": "^2.6.0" - }, - "_requiredBy": [ - "/async-neocities" - ], - "_resolved": "https://registry.npmjs.org/streamx/-/streamx-2.6.0.tgz", - "_shasum": "f7aa741edc8142666c12d09479e708fdc2a09c33", - "_spec": "streamx@^2.6.0", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities", - "author": { - "name": "Mathias Buus", - "url": "@mafintosh" - }, - "bugs": { - "url": "https://github.com/mafintosh/streamx/issues" - }, - "bundleDependencies": false, - "dependencies": { - "fast-fifo": "^1.0.0", - "nanoassert": "^2.0.0" - }, - "deprecated": false, - "description": "An iteration of the Node.js core streams with a series of improvements", - "devDependencies": { - "end-of-stream": "^1.4.1", - "standard": "^14.3.1", - "tape": "^4.11.0" - }, - "homepage": "https://github.com/mafintosh/streamx", - "license": "MIT", - "main": "index.js", - "name": "streamx", - "repository": { - "type": "git", - "url": "git+https://github.com/mafintosh/streamx.git" - }, - "scripts": { - "test": "standard && tape test/*.js" - }, - "version": "2.6.0" -} diff --git a/node_modules/streamx/test/async-iterator.js b/node_modules/streamx/test/async-iterator.js deleted file mode 100644 index 4254bdc..0000000 --- a/node_modules/streamx/test/async-iterator.js +++ /dev/null @@ -1,21 +0,0 @@ -const tape = require('tape') -const { Readable } = require('../') - -tape('streams are async iterators', async function (t) { - const data = ['a', 'b', 'c', null] - const expected = data.slice(0) - - const r = new Readable({ - read (cb) { - this.push(data.shift()) - cb(null) - } - }) - - for await (const chunk of r) { - t.same(chunk, expected.shift()) - } - - 
t.same(expected, [null]) - t.end() -}) diff --git a/node_modules/streamx/test/backpressure.js b/node_modules/streamx/test/backpressure.js deleted file mode 100644 index 467ee22..0000000 --- a/node_modules/streamx/test/backpressure.js +++ /dev/null @@ -1,105 +0,0 @@ -const tape = require('tape') -const { Writable, Readable } = require('../') - -tape('write backpressure', function (t) { - const ws = new Writable() - - for (let i = 0; i < 15; i++) { - t.ok(ws.write('a'), 'not backpressured') - t.notOk(Writable.isBackpressured(ws), 'static check') - } - - t.notOk(ws.write('a'), 'backpressured') - t.ok(Writable.isBackpressured(ws), 'static check') - - t.end() -}) - -tape('write backpressure with drain', function (t) { - const ws = new Writable() - - for (let i = 0; i < 15; i++) { - t.ok(ws.write('a'), 'not backpressured') - t.notOk(Writable.isBackpressured(ws), 'static check') - } - - t.notOk(ws.write('a'), 'backpressured') - t.ok(Writable.isBackpressured(ws), 'static check') - - ws.on('drain', function () { - t.notOk(Writable.isBackpressured(ws)) - t.end() - }) -}) - -tape('write backpressure with destroy', function (t) { - const ws = new Writable() - - ws.write('a') - ws.destroy() - - t.ok(Writable.isBackpressured(ws)) - t.end() -}) - -tape('write backpressure with end', function (t) { - const ws = new Writable() - - ws.write('a') - ws.end() - - t.ok(Writable.isBackpressured(ws)) - t.end() -}) - -tape('read backpressure', function (t) { - const rs = new Readable() - - for (let i = 0; i < 15; i++) { - t.ok(rs.push('a'), 'not backpressured') - t.notOk(Readable.isBackpressured(rs), 'static check') - } - - t.notOk(rs.push('a'), 'backpressured') - t.ok(Readable.isBackpressured(rs), 'static check') - - t.end() -}) - -tape('read backpressure with later read', function (t) { - const rs = new Readable() - - for (let i = 0; i < 15; i++) { - t.ok(rs.push('a'), 'not backpressured') - t.notOk(Readable.isBackpressured(rs), 'static check') - } - - t.notOk(rs.push('a'), 'backpressured') - t.ok(Readable.isBackpressured(rs), 'static check') - - rs.once('readable', function () { - rs.read() - t.notOk(Readable.isBackpressured(rs)) - t.end() - }) -}) - -tape('read backpressure with destroy', function (t) { - const rs = new Readable() - - rs.push('a') - rs.destroy() - - t.ok(Readable.isBackpressured(rs)) - t.end() -}) - -tape('read backpressure with push(null)', function (t) { - const rs = new Readable() - - rs.push('a') - rs.push(null) - - t.ok(Readable.isBackpressured(rs)) - t.end() -}) diff --git a/node_modules/streamx/test/compat.js b/node_modules/streamx/test/compat.js deleted file mode 100644 index a24b860..0000000 --- a/node_modules/streamx/test/compat.js +++ /dev/null @@ -1,178 +0,0 @@ -const eos = require('end-of-stream') -const tape = require('tape') -const stream = require('../') -const finished = require('stream').finished - -run(eos) -run(finished) - -function run (eos) { - if (!eos) return - const name = eos === finished ? 
'nodeStream.finished' : 'eos' - tape(name + ' readable', function (t) { - const r = new stream.Readable() - let ended = false - - r.on('end', function () { - ended = true - }) - - eos(r, function (err) { - t.error(err, 'no error') - t.ok(ended) - t.end() - }) - - r.push('hello') - r.push(null) - r.resume() - }) - - tape(name + ' readable destroy', function (t) { - const r = new stream.Readable() - let ended = false - - r.on('end', function () { - ended = true - }) - - eos(r, function (err) { - t.ok(err, 'had error') - t.notOk(ended) - t.end() - }) - - r.push('hello') - r.push(null) - r.resume() - r.destroy() - }) - - tape(name + ' writable', function (t) { - const w = new stream.Writable() - let finished = false - - w.on('finish', function () { - finished = true - }) - - eos(w, function (err) { - t.error(err, 'no error') - t.ok(finished) - t.end() - }) - - w.write('hello') - w.end() - }) - - tape(name + ' writable destroy', function (t) { - const w = new stream.Writable() - let finished = false - - w.on('finish', function () { - finished = true - }) - - eos(w, function (err) { - t.ok(err, 'had error') - t.notOk(finished) - t.end() - }) - - w.write('hello') - w.end() - w.destroy() - }) - - tape(name + ' duplex', function (t) { - const s = new stream.Duplex() - let ended = false - let finished = false - - s.on('end', () => { ended = true }) - s.on('finish', () => { finished = true }) - - eos(s, function (err) { - t.error(err, 'no error') - t.ok(ended) - t.ok(finished) - t.end() - }) - - s.push('hello') - s.push(null) - s.resume() - s.end() - }) - - tape(name + ' duplex + deferred s.end()', function (t) { - const s = new stream.Duplex() - let ended = false - let finished = false - - s.on('end', function () { - ended = true - setImmediate(() => s.end()) - }) - - s.on('finish', () => { finished = true }) - - eos(s, function (err) { - t.error(err, 'no error') - t.ok(ended) - t.ok(finished) - t.end() - }) - - s.push('hello') - s.push(null) - s.resume() - }) - - tape(name + ' duplex + deferred s.push(null)', function (t) { - const s = new stream.Duplex() - let ended = false - let finished = false - - s.on('finish', function () { - finished = true - setImmediate(() => s.push(null)) - }) - - s.on('end', () => { ended = true }) - - eos(s, function (err) { - t.error(err, 'no error') - t.ok(ended) - t.ok(finished) - t.end() - }) - - s.push('hello') - s.end() - s.resume() - }) - - tape(name + ' duplex destroy', function (t) { - const s = new stream.Duplex() - let ended = false - let finished = false - - s.on('end', () => { ended = true }) - s.on('finish', () => { finished = true }) - - eos(s, function (err) { - t.ok(err, 'had error') - t.notOk(ended) - t.notOk(finished) - t.end() - }) - - s.push('hello') - s.push(null) - s.resume() - s.end() - s.destroy() - }) -} diff --git a/node_modules/streamx/test/pipe.js b/node_modules/streamx/test/pipe.js deleted file mode 100644 index f6380e8..0000000 --- a/node_modules/streamx/test/pipe.js +++ /dev/null @@ -1,134 +0,0 @@ -const tape = require('tape') -const compat = require('stream') -const { Readable, Writable } = require('../') - -tape('pipe to node stream', function (t) { - const expected = [ - 'hi', - 'ho' - ] - - const r = new Readable() - const w = new compat.Writable({ - objectMode: true, - write (data, enc, cb) { - t.same(data, expected.shift()) - cb(null) - } - }) - - r.push('hi') - r.push('ho') - r.push(null) - - r.pipe(w) - - w.on('finish', function () { - t.same(expected.length, 0) - t.end() - }) -}) - -tape('pipe with callback - error case', 
function (t) { - const r = new Readable() - const w = new Writable({ - write (data, cb) { - cb(new Error('blerg')) - } - }) - - r.pipe(w, function (err) { - t.pass('callback called') - t.same(err, new Error('blerg')) - t.end() - }) - - r.push('hello') - r.push('world') - r.push(null) -}) - -tape('pipe with callback - error case with destroy', function (t) { - const r = new Readable() - const w = new Writable({ - write (data, cb) { - w.destroy(new Error('blerg')) - cb(null) - } - }) - - r.pipe(w, function (err) { - t.pass('callback called') - t.same(err, new Error('blerg')) - t.end() - }) - - r.push('hello') - r.push('world') -}) - -tape('pipe with callback - error case node stream', function (t) { - const r = new Readable() - const w = new compat.Writable({ - write (data, enc, cb) { - cb(new Error('blerg')) - } - }) - - r.pipe(w, function (err) { - t.pass('callback called') - t.same(err, new Error('blerg')) - t.end() - }) - - r.push('hello') - r.push('world') - r.push(null) -}) - -tape('simple pipe', function (t) { - const buffered = [] - - const r = new Readable() - const w = new Writable({ - write (data, cb) { - buffered.push(data) - cb(null) - }, - - final () { - t.pass('final called') - t.same(buffered, ['hello', 'world']) - t.end() - } - }) - - r.pipe(w) - - r.push('hello') - r.push('world') - r.push(null) -}) - -tape('pipe with callback', function (t) { - const buffered = [] - - const r = new Readable() - const w = new Writable({ - write (data, cb) { - buffered.push(data) - cb(null) - } - }) - - r.pipe(w, function (err) { - t.pass('callback called') - t.same(err, null) - t.same(buffered, ['hello', 'world']) - t.end() - }) - - r.push('hello') - r.push('world') - r.push(null) -}) diff --git a/node_modules/streamx/test/readable.js b/node_modules/streamx/test/readable.js deleted file mode 100644 index b138da3..0000000 --- a/node_modules/streamx/test/readable.js +++ /dev/null @@ -1,119 +0,0 @@ -const tape = require('tape') -const { Readable } = require('../') - -tape('ondata', function (t) { - const r = new Readable() - const buffered = [] - let ended = 0 - - r.push('hello') - r.push('world') - r.push(null) - - r.on('data', data => buffered.push(data)) - r.on('end', () => ended++) - r.on('close', function () { - t.pass('closed') - t.same(buffered, ['hello', 'world']) - t.same(ended, 1) - t.ok(r.destroyed) - t.end() - }) -}) - -tape('resume', function (t) { - const r = new Readable() - let ended = 0 - - r.push('hello') - r.push('world') - r.push(null) - - r.resume() - r.on('end', () => ended++) - r.on('close', function () { - t.pass('closed') - t.same(ended, 1) - t.ok(r.destroyed) - t.end() - }) -}) - -tape('shorthands', function (t) { - t.plan(3 + 1) - - const r = new Readable({ - read (cb) { - this.push('hello') - cb(null) - }, - destroy (cb) { - t.pass('destroyed') - cb(null) - } - }) - - r.once('readable', function () { - t.same(r.read(), 'hello') - t.same(r.read(), 'hello') - r.destroy() - t.same(r.read(), null) - }) -}) - -tape('both push and the cb needs to be called for re-reads', function (t) { - t.plan(2) - - let once = true - - const r = new Readable({ - read (cb) { - t.ok(once, 'read called once') - once = false - cb(null) - } - }) - - r.resume() - - setTimeout(function () { - once = true - r.push('hi') - }, 100) -}) - -tape('from array', function (t) { - const inc = [] - const r = Readable.from([1, 2, 3]) - r.on('data', data => inc.push(data)) - r.on('end', function () { - t.same(inc, [1, 2, 3]) - t.end() - }) -}) - -tape('from buffer', function (t) { - const inc = [] - const 
r = Readable.from(Buffer.from('hello')) - r.on('data', data => inc.push(data)) - r.on('end', function () { - t.same(inc, [Buffer.from('hello')]) - t.end() - }) -}) - -tape('from async iterator', function (t) { - async function * test () { - yield 1 - yield 2 - yield 3 - } - - const inc = [] - const r = Readable.from(test()) - r.on('data', data => inc.push(data)) - r.on('end', function () { - t.same(inc, [1, 2, 3]) - t.end() - }) -}) diff --git a/node_modules/streamx/test/writable.js b/node_modules/streamx/test/writable.js deleted file mode 100644 index a084459..0000000 --- a/node_modules/streamx/test/writable.js +++ /dev/null @@ -1,103 +0,0 @@ -const tape = require('tape') -const { Writable } = require('../') - -tape('opens before writes', function (t) { - t.plan(2) - const trace = [] - const stream = new Writable({ - open (cb) { - trace.push('open') - return cb(null) - }, - write (data, cb) { - trace.push('write') - return cb(null) - } - }) - stream.on('close', () => { - t.equals(trace.length, 2) - t.equals(trace[0], 'open') - }) - stream.write('data') - stream.end() -}) - -tape('drain', function (t) { - const stream = new Writable({ - highWaterMark: 1, - write (data, cb) { - cb(null) - } - }) - - t.notOk(stream.write('a')) - stream.on('drain', function () { - t.pass('drained') - t.end() - }) -}) - -tape('drain multi write', function (t) { - t.plan(4) - - const stream = new Writable({ - highWaterMark: 1, - write (data, cb) { - cb(null) - } - }) - - t.notOk(stream.write('a')) - t.notOk(stream.write('a')) - t.notOk(stream.write('a')) - stream.on('drain', function () { - t.pass('drained') - t.end() - }) -}) - -tape('drain async write', function (t) { - let flushed = false - - const stream = new Writable({ - highWaterMark: 1, - write (data, cb) { - setImmediate(function () { - flushed = true - cb(null) - }) - } - }) - - t.notOk(stream.write('a')) - t.notOk(flushed) - stream.on('drain', function () { - t.ok(flushed) - t.end() - }) -}) - -tape('writev', function (t) { - const expected = [[], ['ho']] - - const s = new Writable({ - writev (batch, cb) { - t.same(batch, expected.shift()) - cb(null) - } - }) - - for (let i = 0; i < 100; i++) { - expected[0].push('hi-' + i) - s.write('hi-' + i) - } - - s.on('drain', function () { - s.write('ho') - s.end() - }) - - s.on('finish', function () { - t.end() - }) -}) diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE deleted file mode 100644 index 778edb2..0000000 --- a/node_modules/string_decoder/LICENSE +++ /dev/null @@ -1,48 +0,0 @@ -Node.js is licensed for use as follows: - -""" -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - -This license applies to parts of Node.js originating from the -https://github.com/joyent/node repository: - -""" -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -""" - diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md deleted file mode 100644 index 5fd5831..0000000 --- a/node_modules/string_decoder/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# string_decoder - -***Node-core v8.9.4 string_decoder for userland*** - - -[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) -[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) - - -```bash -npm install --save string_decoder -``` - -***Node-core string_decoder for userland*** - -This package is a mirror of the string_decoder implementation in Node-core. - -Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). - -As of version 1.0.0 **string_decoder** uses semantic versioning. - -## Previous versions - -Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. - -## Update - -The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. - -## Streams Working Group - -`string_decoder` is maintained by the Streams Working Group, which -oversees the development and maintenance of the Streams API within -Node.js. The responsibilities of the Streams Working Group include: - -* Addressing stream issues on the Node.js issue tracker. -* Authoring and editing stream documentation within the Node.js project. -* Reviewing changes to stream subclasses within the Node.js project. -* Redirecting changes to streams from the Node.js project to this - project. -* Assisting in the implementation of stream providers within Node.js. -* Recommending versions of `readable-stream` to be included in Node.js. -* Messaging about the future of streams to give the community advance - notice of changes. 
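A quick usage sketch of the behaviour described above and implemented in `lib/string_decoder.js` below: a multi-byte character split across buffer chunks is held back until it can be decoded whole, and a trailing partial character is replaced rather than emitted as garbage.

```js
const { StringDecoder } = require('string_decoder')

const decoder = new StringDecoder('utf8')
const euro = Buffer.from([0xe2, 0x82, 0xac]) // '€' as three UTF-8 bytes

console.log(decoder.write(euro.slice(0, 1))) // ''  – partial character buffered
console.log(decoder.write(euro.slice(1, 2))) // ''  – still incomplete
console.log(decoder.write(euro.slice(2)))    // '€' – emitted once complete

console.log(decoder.end(Buffer.from([0xe2, 0x82]))) // '\ufffd' – incomplete at end
```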
- -See [readable-stream](https://github.com/nodejs/readable-stream) for -more details. diff --git a/node_modules/string_decoder/lib/string_decoder.js b/node_modules/string_decoder/lib/string_decoder.js deleted file mode 100644 index 2e89e63..0000000 --- a/node_modules/string_decoder/lib/string_decoder.js +++ /dev/null @@ -1,296 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; - -/**/ - -var Buffer = require('safe-buffer').Buffer; -/**/ - -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; - } -}; - -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; - } - } -}; - -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; -} - -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. 
-exports.StringDecoder = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; - } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); -} - -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; - } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; - -StringDecoder.prototype.end = utf8End; - -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; - -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; - -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; -} - -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. -function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; - } - return nb; - } - return 0; -} - -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. 
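// For illustration, the replacement behaviour implemented by utf8CheckExtraBytes()
// below looks like this from the caller's side (traced against this file):
//
//   const { StringDecoder } = require('string_decoder')
//   const d = new StringDecoder('utf8')
//   d.write(Buffer.from([0xe2])) // ''        – lead byte buffered, 2 more bytes expected
//   d.write(Buffer.from([0x41])) // '\ufffdA' – invalid continuation byte: the partial
//                                //             character becomes U+FFFD, then 'A' decodes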
-function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; - } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; - } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; - } - } - } -} - -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; -} - -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); -} - -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; -} - -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. -function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } - } - return r; - } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); -} - -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. -function utf16End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); - } - return r; -} - -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - } - return buf.toString('base64', i, buf.length - n); -} - -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; -} - -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); -} - -function simpleEnd(buf) { - return buf && buf.length ? 
this.write(buf) : ''; -} \ No newline at end of file diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json deleted file mode 100644 index d6c62b0..0000000 --- a/node_modules/string_decoder/package.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "_from": "string_decoder@^1.1.1", - "_id": "string_decoder@1.3.0", - "_inBundle": false, - "_integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "_location": "/string_decoder", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "string_decoder@^1.1.1", - "name": "string_decoder", - "escapedName": "string_decoder", - "rawSpec": "^1.1.1", - "saveSpec": null, - "fetchSpec": "^1.1.1" - }, - "_requiredBy": [ - "/browserify", - "/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "_shasum": "42f114594a46cf1a8e30b0a84f56c78c3edac21e", - "_spec": "string_decoder@^1.1.1", - "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/readable-stream", - "bugs": { - "url": "https://github.com/nodejs/string_decoder/issues" - }, - "bundleDependencies": false, - "dependencies": { - "safe-buffer": "~5.2.0" - }, - "deprecated": false, - "description": "The string_decoder module from Node core", - "devDependencies": { - "babel-polyfill": "^6.23.0", - "core-util-is": "^1.0.2", - "inherits": "^2.0.3", - "tap": "~0.4.8" - }, - "files": [ - "lib" - ], - "homepage": "https://github.com/nodejs/string_decoder", - "keywords": [ - "string", - "decoder", - "browser", - "browserify" - ], - "license": "MIT", - "main": "lib/string_decoder.js", - "name": "string_decoder", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/string_decoder.git" - }, - "scripts": { - "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", - "test": "tap test/parallel/*.js && node test/verify-dependencies" - }, - "version": "1.3.0" -} diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md deleted file mode 100644 index acc8675..0000000 --- a/node_modules/util-deprecate/History.md +++ /dev/null @@ -1,16 +0,0 @@ - -1.0.2 / 2015-10-07 -================== - - * use try/catch when checking `localStorage` (#3, @kumavis) - -1.0.1 / 2014-11-25 -================== - - * browser: use `console.warn()` for deprecation calls - * browser: more jsdocs - -1.0.0 / 2014-04-30 -================== - - * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE deleted file mode 100644 index 6a60e8c..0000000 --- a/node_modules/util-deprecate/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md deleted file mode 100644 index 75622fa..0000000 --- a/node_modules/util-deprecate/README.md +++ /dev/null @@ -1,53 +0,0 @@ -util-deprecate -============== -### The Node.js `util.deprecate()` function with browser support - -In Node.js, this module simply re-exports the `util.deprecate()` function. - -In the web browser (i.e. via browserify), a browser-specific implementation -of the `util.deprecate()` function is used. - - -## API - -A `deprecate()` function is the only thing exposed by this module. - -``` javascript -// setup: -exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); - - -// users see: -foo(); -// foo() is deprecated, use bar() instead -foo(); -foo(); -``` - - -## License - -(The MIT License) - -Copyright (c) 2014 Nathan Rajlich - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js deleted file mode 100644 index 549ae2f..0000000 --- a/node_modules/util-deprecate/browser.js +++ /dev/null @@ -1,67 +0,0 @@ - -/** - * Module exports. - */ - -module.exports = deprecate; - -/** - * Mark that a method should not be used. - * Returns a modified function which warns once by default. - * - * If `localStorage.noDeprecation = true` is set, then it is a no-op. - * - * If `localStorage.throwDeprecation = true` is set, then deprecated functions - * will throw an Error when invoked. - * - * If `localStorage.traceDeprecation = true` is set, then deprecated functions - * will invoke `console.trace()` instead of `console.error()`. 
- *
- * @param {Function} fn - the function to deprecate
- * @param {String} msg - the string to print to the console when `fn` is invoked
- * @returns {Function} a new "deprecated" version of `fn`
- * @api public
- */
-
-function deprecate (fn, msg) {
-  if (config('noDeprecation')) {
-    return fn;
-  }
-
-  var warned = false;
-  function deprecated() {
-    if (!warned) {
-      if (config('throwDeprecation')) {
-        throw new Error(msg);
-      } else if (config('traceDeprecation')) {
-        console.trace(msg);
-      } else {
-        console.warn(msg);
-      }
-      warned = true;
-    }
-    return fn.apply(this, arguments);
-  }
-
-  return deprecated;
-}
-
-/**
- * Checks `localStorage` for boolean values for the given `name`.
- *
- * @param {String} name
- * @returns {Boolean}
- * @api private
- */
-
-function config (name) {
-  // accessing global.localStorage can trigger a DOMException in sandboxed iframes
-  try {
-    if (!global.localStorage) return false;
-  } catch (_) {
-    return false;
-  }
-  var val = global.localStorage[name];
-  if (null == val) return false;
-  return String(val).toLowerCase() === 'true';
-}
diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js
deleted file mode 100644
index 5e6fcff..0000000
--- a/node_modules/util-deprecate/node.js
+++ /dev/null
@@ -1,6 +0,0 @@
-
-/**
- * For Node.js, simply re-export the core `util.deprecate` function.
- */
-
-module.exports = require('util').deprecate;
diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json
deleted file mode 100644
index b14df26..0000000
--- a/node_modules/util-deprecate/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
-  "_from": "util-deprecate@^1.0.1",
-  "_id": "util-deprecate@1.0.2",
-  "_inBundle": false,
-  "_integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
-  "_location": "/util-deprecate",
-  "_phantomChildren": {},
-  "_requested": {
-    "type": "range",
-    "registry": true,
-    "raw": "util-deprecate@^1.0.1",
-    "name": "util-deprecate",
-    "escapedName": "util-deprecate",
-    "rawSpec": "^1.0.1",
-    "saveSpec": null,
-    "fetchSpec": "^1.0.1"
-  },
-  "_requiredBy": [
-    "/bl/readable-stream",
-    "/browserify/readable-stream",
-    "/concat-stream/readable-stream",
-    "/duplexer2/readable-stream",
-    "/from2/readable-stream",
-    "/module-deps/readable-stream",
-    "/read-only-stream/readable-stream",
-    "/readable-stream",
-    "/readdirp/readable-stream",
-    "/stdout-stream/readable-stream",
-    "/stream-browserify/readable-stream",
-    "/stream-combiner2/readable-stream",
-    "/stream-splicer/readable-stream",
-    "/through2/readable-stream"
-  ],
-  "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
-  "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf",
-  "_spec": "util-deprecate@^1.0.1",
-  "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/readable-stream",
-  "author": {
-    "name": "Nathan Rajlich",
-    "email": "nathan@tootallnate.net",
-    "url": "http://n8.io/"
-  },
-  "browser": "browser.js",
-  "bugs": {
-    "url": "https://github.com/TooTallNate/util-deprecate/issues"
-  },
-  "bundleDependencies": false,
-  "deprecated": false,
-  "description": "The Node.js `util.deprecate()` function with browser support",
-  "homepage": "https://github.com/TooTallNate/util-deprecate",
-  "keywords": [
-    "util",
-    "deprecate",
-    "browserify",
-    "browser",
-    "node"
-  ],
-  "license": "MIT",
-  "main": "node.js",
-  "name": "util-deprecate",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/TooTallNate/util-deprecate.git"
-  },
-  "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1"
-  },
-  "version": "1.0.2"
-}
diff --git a/node_modules/wrappy/LICENSE b/node_modules/wrappy/LICENSE
deleted file mode 100644
index 19129e3..0000000
--- a/node_modules/wrappy/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/wrappy/README.md b/node_modules/wrappy/README.md
deleted file mode 100644
index 98eab25..0000000
--- a/node_modules/wrappy/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# wrappy
-
-Callback wrapping utility
-
-## USAGE
-
-```javascript
-var wrappy = require("wrappy")
-
-// var wrapper = wrappy(wrapperFunction)
-
-// make sure a cb is called only once
-// See also: http://npm.im/once for this specific use case
-var once = wrappy(function (cb) {
-  var called = false
-  return function () {
-    if (called) return
-    called = true
-    return cb.apply(this, arguments)
-  }
-})
-
-function printBoo () {
-  console.log('boo')
-}
-// has some rando property
-printBoo.iAmBooPrinter = true
-
-var onlyPrintOnce = once(printBoo)
-
-onlyPrintOnce() // prints 'boo'
-onlyPrintOnce() // does nothing
-
-// random property is retained!
-assert.equal(onlyPrintOnce.iAmBooPrinter, true)
-```
diff --git a/node_modules/wrappy/package.json b/node_modules/wrappy/package.json
deleted file mode 100644
index ae7e544..0000000
--- a/node_modules/wrappy/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
-  "_from": "wrappy@1",
-  "_id": "wrappy@1.0.2",
-  "_inBundle": false,
-  "_integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
-  "_location": "/wrappy",
-  "_phantomChildren": {},
-  "_requested": {
-    "type": "range",
-    "registry": true,
-    "raw": "wrappy@1",
-    "name": "wrappy",
-    "escapedName": "wrappy",
-    "rawSpec": "1",
-    "saveSpec": null,
-    "fetchSpec": "1"
-  },
-  "_requiredBy": [
-    "/inflight",
-    "/map-limit/once",
-    "/once"
-  ],
-  "_resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
-  "_shasum": "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f",
-  "_spec": "wrappy@1",
-  "_where": "/Users/bret/repos/deploy-to-neocities/node_modules/once",
-  "author": {
-    "name": "Isaac Z. Schlueter",
-    "email": "i@izs.me",
-    "url": "http://blog.izs.me/"
-  },
-  "bugs": {
-    "url": "https://github.com/npm/wrappy/issues"
-  },
-  "bundleDependencies": false,
-  "dependencies": {},
-  "deprecated": false,
-  "description": "Callback wrapping utility",
-  "devDependencies": {
-    "tap": "^2.3.1"
-  },
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "wrappy.js"
-  ],
-  "homepage": "https://github.com/npm/wrappy",
-  "license": "ISC",
-  "main": "wrappy.js",
-  "name": "wrappy",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/wrappy.git"
-  },
-  "scripts": {
-    "test": "tap --coverage test/*.js"
-  },
-  "version": "1.0.2"
-}
diff --git a/node_modules/wrappy/wrappy.js b/node_modules/wrappy/wrappy.js
deleted file mode 100644
index bb7e7d6..0000000
--- a/node_modules/wrappy/wrappy.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Returns a wrapper function that returns a wrapped callback
-// The wrapper function should do some stuff, and return a
-// presumably different callback function.
-// This makes sure that own properties are retained, so that
-// decorations and such are not lost along the way.
-module.exports = wrappy
-function wrappy (fn, cb) {
-  if (fn && cb) return wrappy(fn)(cb)
-
-  if (typeof fn !== 'function')
-    throw new TypeError('need wrapper function')
-
-  Object.keys(fn).forEach(function (k) {
-    wrapper[k] = fn[k]
-  })
-
-  return wrapper
-
-  function wrapper() {
-    var args = new Array(arguments.length)
-    for (var i = 0; i < args.length; i++) {
-      args[i] = arguments[i]
-    }
-    var ret = fn.apply(this, args)
-    var cb = args[args.length-1]
-    if (typeof ret === 'function' && ret !== cb) {
-      Object.keys(cb).forEach(function (k) {
-        ret[k] = cb[k]
-      })
-    }
-    return ret
-  }
-}
diff --git a/package.json b/package.json
index 96a6002..fd71fe2 100644
--- a/package.json
+++ b/package.json
@@ -10,13 +10,14 @@
     "test:standard": "standard",
     "release": "git push --follow-tags && gh-release",
     "version": "run-s version:*",
-    "version:1-changelog": "auto-changelog -p --template keepachangelog auto-changelog --breaking-pattern 'BREAKING:' && git add CHANGELOG.md",
-    "version:2-cleandeps": "mv node_modules tmp_modules && npm i --only=prod && git add node_modules --force",
+    "version:changelog": "auto-changelog -p --template keepachangelog auto-changelog --breaking-pattern 'BREAKING:' && git add CHANGELOG.md",
+    "version:build": "npm run build:action && git add dist",
     "postversion": "rm -rf node_modules && mv tmp_modules node_modules",
     "clean": "rimraf public && mkdirp public",
     "build": "npm run clean && run-p build:*",
     "build:md": "sitedown . -b public -l layout.html",
     "build:static": "cpx './**/*.{png,ico}' public",
+    "build:action": "rm -rf dist && n
     "start": "npm run watch",
     "watch": "npm run clean && run-p watch:*",
     "watch:js": "budo --dir public --live --open",
@@ -42,7 +43,8 @@
     "gh-release": "^3.5.0",
     "npm-run-all": "^4.1.5",
     "standard": "^14.3.1",
-    "sitedown": "^4.0.0"
+    "sitedown": "^4.0.0",
+    "@zeit/ncc": "^0.21.1"
   },
   "dependencies": {
     "@actions/core": "1.2.2",