From c84f4074584381b5d1dee935d3f7e8c133e0e3a9 Mon Sep 17 00:00:00 2001 From: Simon Martens Date: Wed, 4 Dec 2024 17:29:29 +0100 Subject: [PATCH] WTF no node modules here --- node_modules/.bin/nanoid | 1 - node_modules/.bin/postcss | 1 - node_modules/.bin/yaml | 1 - node_modules/.package-lock.json | 910 ---------- node_modules/@nodelib/fs.scandir/LICENSE | 21 - node_modules/@nodelib/fs.scandir/README.md | 171 -- .../@nodelib/fs.scandir/out/adapters/fs.d.ts | 20 - .../@nodelib/fs.scandir/out/adapters/fs.js | 19 - .../@nodelib/fs.scandir/out/constants.d.ts | 4 - .../@nodelib/fs.scandir/out/constants.js | 17 - .../@nodelib/fs.scandir/out/index.d.ts | 12 - node_modules/@nodelib/fs.scandir/out/index.js | 26 - .../fs.scandir/out/providers/async.d.ts | 7 - .../fs.scandir/out/providers/async.js | 104 -- .../fs.scandir/out/providers/common.d.ts | 1 - .../fs.scandir/out/providers/common.js | 13 - .../fs.scandir/out/providers/sync.d.ts | 5 - .../@nodelib/fs.scandir/out/providers/sync.js | 54 - .../@nodelib/fs.scandir/out/settings.d.ts | 20 - .../@nodelib/fs.scandir/out/settings.js | 24 - .../@nodelib/fs.scandir/out/types/index.d.ts | 20 - .../@nodelib/fs.scandir/out/types/index.js | 2 - .../@nodelib/fs.scandir/out/utils/fs.d.ts | 2 - .../@nodelib/fs.scandir/out/utils/fs.js | 19 - .../@nodelib/fs.scandir/out/utils/index.d.ts | 2 - .../@nodelib/fs.scandir/out/utils/index.js | 5 - node_modules/@nodelib/fs.scandir/package.json | 44 - node_modules/@nodelib/fs.stat/LICENSE | 21 - node_modules/@nodelib/fs.stat/README.md | 126 -- .../@nodelib/fs.stat/out/adapters/fs.d.ts | 13 - .../@nodelib/fs.stat/out/adapters/fs.js | 17 - node_modules/@nodelib/fs.stat/out/index.d.ts | 12 - node_modules/@nodelib/fs.stat/out/index.js | 26 - .../@nodelib/fs.stat/out/providers/async.d.ts | 4 - .../@nodelib/fs.stat/out/providers/async.js | 36 - .../@nodelib/fs.stat/out/providers/sync.d.ts | 3 - .../@nodelib/fs.stat/out/providers/sync.js | 23 - .../@nodelib/fs.stat/out/settings.d.ts | 16 - node_modules/@nodelib/fs.stat/out/settings.js | 16 - .../@nodelib/fs.stat/out/types/index.d.ts | 4 - .../@nodelib/fs.stat/out/types/index.js | 2 - node_modules/@nodelib/fs.stat/package.json | 37 - node_modules/@nodelib/fs.walk/LICENSE | 21 - node_modules/@nodelib/fs.walk/README.md | 215 --- node_modules/@nodelib/fs.walk/out/index.d.ts | 14 - node_modules/@nodelib/fs.walk/out/index.js | 34 - .../@nodelib/fs.walk/out/providers/async.d.ts | 12 - .../@nodelib/fs.walk/out/providers/async.js | 30 - .../@nodelib/fs.walk/out/providers/index.d.ts | 4 - .../@nodelib/fs.walk/out/providers/index.js | 9 - .../fs.walk/out/providers/stream.d.ts | 12 - .../@nodelib/fs.walk/out/providers/stream.js | 34 - .../@nodelib/fs.walk/out/providers/sync.d.ts | 10 - .../@nodelib/fs.walk/out/providers/sync.js | 14 - .../@nodelib/fs.walk/out/readers/async.d.ts | 30 - .../@nodelib/fs.walk/out/readers/async.js | 97 -- .../@nodelib/fs.walk/out/readers/common.d.ts | 7 - .../@nodelib/fs.walk/out/readers/common.js | 31 - .../@nodelib/fs.walk/out/readers/reader.d.ts | 6 - .../@nodelib/fs.walk/out/readers/reader.js | 11 - .../@nodelib/fs.walk/out/readers/sync.d.ts | 15 - .../@nodelib/fs.walk/out/readers/sync.js | 59 - .../@nodelib/fs.walk/out/settings.d.ts | 30 - node_modules/@nodelib/fs.walk/out/settings.js | 26 - .../@nodelib/fs.walk/out/types/index.d.ts | 8 - .../@nodelib/fs.walk/out/types/index.js | 2 - node_modules/@nodelib/fs.walk/package.json | 44 - .../@sindresorhus/merge-streams/index.d.ts | 44 - .../@sindresorhus/merge-streams/index.js | 223 --- 
.../@sindresorhus/merge-streams/license | 9 - .../@sindresorhus/merge-streams/package.json | 49 - .../@sindresorhus/merge-streams/readme.md | 53 - node_modules/ansi-regex/index.d.ts | 37 - node_modules/ansi-regex/index.js | 10 - node_modules/ansi-regex/license | 9 - node_modules/ansi-regex/package.json | 55 - node_modules/ansi-regex/readme.md | 78 - node_modules/ansi-styles/index.d.ts | 345 ---- node_modules/ansi-styles/index.js | 163 -- node_modules/ansi-styles/license | 9 - node_modules/ansi-styles/package.json | 56 - node_modules/ansi-styles/readme.md | 152 -- node_modules/anymatch/LICENSE | 15 - node_modules/anymatch/README.md | 87 - node_modules/anymatch/index.d.ts | 20 - node_modules/anymatch/index.js | 104 -- node_modules/anymatch/package.json | 48 - .../binary-extensions/binary-extensions.json | 263 --- .../binary-extensions.json.d.ts | 3 - node_modules/binary-extensions/index.d.ts | 14 - node_modules/binary-extensions/index.js | 1 - node_modules/binary-extensions/license | 10 - node_modules/binary-extensions/package.json | 40 - node_modules/binary-extensions/readme.md | 25 - node_modules/braces/LICENSE | 21 - node_modules/braces/README.md | 586 ------- node_modules/braces/index.js | 170 -- node_modules/braces/lib/compile.js | 60 - node_modules/braces/lib/constants.js | 57 - node_modules/braces/lib/expand.js | 113 -- node_modules/braces/lib/parse.js | 331 ---- node_modules/braces/lib/stringify.js | 32 - node_modules/braces/lib/utils.js | 122 -- node_modules/braces/package.json | 77 - node_modules/chokidar/LICENSE | 21 - node_modules/chokidar/README.md | 308 ---- node_modules/chokidar/index.js | 973 ----------- node_modules/chokidar/lib/constants.js | 66 - node_modules/chokidar/lib/fsevents-handler.js | 526 ------ node_modules/chokidar/lib/nodefs-handler.js | 654 ------- node_modules/chokidar/package.json | 70 - node_modules/chokidar/types/index.d.ts | 192 --- node_modules/cliui/CHANGELOG.md | 139 -- node_modules/cliui/LICENSE.txt | 14 - node_modules/cliui/README.md | 141 -- node_modules/cliui/build/index.cjs | 302 ---- node_modules/cliui/build/index.d.cts | 43 - node_modules/cliui/build/lib/index.js | 287 ---- node_modules/cliui/build/lib/string-utils.js | 27 - node_modules/cliui/index.mjs | 13 - node_modules/cliui/package.json | 83 - node_modules/color-convert/CHANGELOG.md | 54 - node_modules/color-convert/LICENSE | 21 - node_modules/color-convert/README.md | 68 - node_modules/color-convert/conversions.js | 839 --------- node_modules/color-convert/index.js | 81 - node_modules/color-convert/package.json | 48 - node_modules/color-convert/route.js | 97 -- node_modules/color-name/LICENSE | 8 - node_modules/color-name/README.md | 11 - node_modules/color-name/index.js | 152 -- node_modules/color-name/package.json | 28 - node_modules/dependency-graph/CHANGELOG.md | 82 - node_modules/dependency-graph/LICENSE | 19 - node_modules/dependency-graph/README.md | 78 - .../dependency-graph/lib/dep_graph.js | 364 ---- node_modules/dependency-graph/lib/index.d.ts | 127 -- node_modules/dependency-graph/package.json | 31 - .../dependency-graph/specs/dep_graph_spec.js | 542 ------ node_modules/emoji-regex/LICENSE-MIT.txt | 20 - node_modules/emoji-regex/README.md | 73 - node_modules/emoji-regex/es2015/index.js | 6 - node_modules/emoji-regex/es2015/text.js | 6 - node_modules/emoji-regex/index.d.ts | 23 - node_modules/emoji-regex/index.js | 6 - node_modules/emoji-regex/package.json | 50 - node_modules/emoji-regex/text.js | 6 - node_modules/escalade/dist/index.js | 22 - 
node_modules/escalade/dist/index.mjs | 22 - node_modules/escalade/index.d.mts | 11 - node_modules/escalade/index.d.ts | 15 - node_modules/escalade/license | 9 - node_modules/escalade/package.json | 74 - node_modules/escalade/readme.md | 211 --- node_modules/escalade/sync/index.d.mts | 9 - node_modules/escalade/sync/index.d.ts | 13 - node_modules/escalade/sync/index.js | 18 - node_modules/escalade/sync/index.mjs | 18 - node_modules/fast-glob/LICENSE | 21 - node_modules/fast-glob/README.md | 830 --------- node_modules/fast-glob/out/index.d.ts | 40 - node_modules/fast-glob/out/index.js | 102 -- .../fast-glob/out/managers/tasks.d.ts | 22 - node_modules/fast-glob/out/managers/tasks.js | 110 -- .../fast-glob/out/providers/async.d.ts | 9 - node_modules/fast-glob/out/providers/async.js | 23 - .../fast-glob/out/providers/filters/deep.d.ts | 16 - .../fast-glob/out/providers/filters/deep.js | 62 - .../out/providers/filters/entry.d.ts | 16 - .../fast-glob/out/providers/filters/entry.js | 63 - .../out/providers/filters/error.d.ts | 8 - .../fast-glob/out/providers/filters/error.js | 15 - .../out/providers/matchers/matcher.d.ts | 33 - .../out/providers/matchers/matcher.js | 45 - .../out/providers/matchers/partial.d.ts | 4 - .../out/providers/matchers/partial.js | 38 - .../fast-glob/out/providers/provider.d.ts | 19 - .../fast-glob/out/providers/provider.js | 48 - .../fast-glob/out/providers/stream.d.ts | 11 - .../fast-glob/out/providers/stream.js | 31 - .../fast-glob/out/providers/sync.d.ts | 9 - node_modules/fast-glob/out/providers/sync.js | 23 - .../out/providers/transformers/entry.d.ts | 8 - .../out/providers/transformers/entry.js | 26 - node_modules/fast-glob/out/readers/async.d.ts | 10 - node_modules/fast-glob/out/readers/async.js | 35 - .../fast-glob/out/readers/reader.d.ts | 15 - node_modules/fast-glob/out/readers/reader.js | 33 - .../fast-glob/out/readers/stream.d.ts | 14 - node_modules/fast-glob/out/readers/stream.js | 55 - node_modules/fast-glob/out/readers/sync.d.ts | 12 - node_modules/fast-glob/out/readers/sync.js | 43 - node_modules/fast-glob/out/settings.d.ts | 164 -- node_modules/fast-glob/out/settings.js | 59 - node_modules/fast-glob/out/types/index.d.ts | 31 - node_modules/fast-glob/out/types/index.js | 2 - node_modules/fast-glob/out/utils/array.d.ts | 2 - node_modules/fast-glob/out/utils/array.js | 22 - node_modules/fast-glob/out/utils/errno.d.ts | 2 - node_modules/fast-glob/out/utils/errno.js | 7 - node_modules/fast-glob/out/utils/fs.d.ts | 4 - node_modules/fast-glob/out/utils/fs.js | 19 - node_modules/fast-glob/out/utils/index.d.ts | 8 - node_modules/fast-glob/out/utils/index.js | 17 - node_modules/fast-glob/out/utils/path.d.ts | 13 - node_modules/fast-glob/out/utils/path.js | 68 - node_modules/fast-glob/out/utils/pattern.d.ts | 47 - node_modules/fast-glob/out/utils/pattern.js | 188 -- node_modules/fast-glob/out/utils/stream.d.ts | 4 - node_modules/fast-glob/out/utils/stream.js | 17 - node_modules/fast-glob/out/utils/string.d.ts | 2 - node_modules/fast-glob/out/utils/string.js | 11 - node_modules/fast-glob/package.json | 81 - node_modules/fastq/.github/dependabot.yml | 11 - node_modules/fastq/.github/workflows/ci.yml | 75 - node_modules/fastq/LICENSE | 13 - node_modules/fastq/README.md | 306 ---- node_modules/fastq/bench.js | 66 - node_modules/fastq/example.js | 14 - node_modules/fastq/example.mjs | 11 - node_modules/fastq/index.d.ts | 38 - node_modules/fastq/package.json | 53 - node_modules/fastq/queue.js | 311 ---- node_modules/fastq/test/example.ts | 83 - 
node_modules/fastq/test/promise.js | 248 --- node_modules/fastq/test/test.js | 642 ------- node_modules/fastq/test/tsconfig.json | 11 - node_modules/fill-range/LICENSE | 21 - node_modules/fill-range/README.md | 237 --- node_modules/fill-range/index.js | 248 --- node_modules/fill-range/package.json | 74 - node_modules/fs-extra/LICENSE | 15 - node_modules/fs-extra/README.md | 292 ---- node_modules/fs-extra/lib/copy/copy-sync.js | 161 -- node_modules/fs-extra/lib/copy/copy.js | 177 -- node_modules/fs-extra/lib/copy/index.js | 7 - node_modules/fs-extra/lib/empty/index.js | 39 - node_modules/fs-extra/lib/ensure/file.js | 66 - node_modules/fs-extra/lib/ensure/index.js | 23 - node_modules/fs-extra/lib/ensure/link.js | 64 - .../fs-extra/lib/ensure/symlink-paths.js | 101 -- .../fs-extra/lib/ensure/symlink-type.js | 34 - node_modules/fs-extra/lib/ensure/symlink.js | 67 - node_modules/fs-extra/lib/esm.mjs | 68 - node_modules/fs-extra/lib/fs/index.js | 140 -- node_modules/fs-extra/lib/index.js | 16 - node_modules/fs-extra/lib/json/index.js | 16 - node_modules/fs-extra/lib/json/jsonfile.js | 11 - .../fs-extra/lib/json/output-json-sync.js | 12 - node_modules/fs-extra/lib/json/output-json.js | 12 - node_modules/fs-extra/lib/mkdirs/index.js | 14 - node_modules/fs-extra/lib/mkdirs/make-dir.js | 27 - node_modules/fs-extra/lib/mkdirs/utils.js | 21 - node_modules/fs-extra/lib/move/index.js | 7 - node_modules/fs-extra/lib/move/move-sync.js | 55 - node_modules/fs-extra/lib/move/move.js | 59 - .../fs-extra/lib/output-file/index.js | 31 - .../fs-extra/lib/path-exists/index.js | 12 - node_modules/fs-extra/lib/remove/index.js | 17 - node_modules/fs-extra/lib/util/stat.js | 158 -- node_modules/fs-extra/lib/util/utimes.js | 36 - node_modules/fs-extra/package.json | 71 - node_modules/get-caller-file/LICENSE.md | 6 - node_modules/get-caller-file/README.md | 41 - node_modules/get-caller-file/index.d.ts | 2 - node_modules/get-caller-file/index.js | 22 - node_modules/get-caller-file/index.js.map | 1 - node_modules/get-caller-file/package.json | 42 - node_modules/get-stdin/index.d.ts | 29 - node_modules/get-stdin/index.js | 33 - node_modules/get-stdin/license | 9 - node_modules/get-stdin/package.json | 42 - node_modules/get-stdin/readme.md | 56 - node_modules/glob-parent/CHANGELOG.md | 110 -- node_modules/glob-parent/LICENSE | 15 - node_modules/glob-parent/README.md | 137 -- node_modules/glob-parent/index.js | 42 - node_modules/glob-parent/package.json | 48 - node_modules/globby/ignore.js | 109 -- node_modules/globby/index.d.ts | 207 --- node_modules/globby/index.js | 264 --- node_modules/globby/license | 9 - node_modules/globby/package.json | 94 - node_modules/globby/readme.md | 177 -- node_modules/globby/utilities.js | 1 - node_modules/graceful-fs/LICENSE | 15 - node_modules/graceful-fs/README.md | 143 -- node_modules/graceful-fs/clone.js | 23 - node_modules/graceful-fs/graceful-fs.js | 448 ----- node_modules/graceful-fs/legacy-streams.js | 118 -- node_modules/graceful-fs/package.json | 53 - node_modules/graceful-fs/polyfills.js | 355 ---- node_modules/ignore/LICENSE-MIT | 21 - node_modules/ignore/README.md | 412 ----- node_modules/ignore/index.d.ts | 61 - node_modules/ignore/index.js | 636 ------- node_modules/ignore/legacy.js | 559 ------ node_modules/ignore/package.json | 74 - node_modules/is-binary-path/index.d.ts | 17 - node_modules/is-binary-path/index.js | 7 - node_modules/is-binary-path/license | 9 - node_modules/is-binary-path/package.json | 40 - node_modules/is-binary-path/readme.md | 34 - 
node_modules/is-extglob/LICENSE | 21 - node_modules/is-extglob/README.md | 107 -- node_modules/is-extglob/index.js | 20 - node_modules/is-extglob/package.json | 69 - .../is-fullwidth-code-point/index.d.ts | 17 - node_modules/is-fullwidth-code-point/index.js | 50 - node_modules/is-fullwidth-code-point/license | 9 - .../is-fullwidth-code-point/package.json | 42 - .../is-fullwidth-code-point/readme.md | 39 - node_modules/is-glob/LICENSE | 21 - node_modules/is-glob/README.md | 206 --- node_modules/is-glob/index.js | 150 -- node_modules/is-glob/package.json | 81 - node_modules/is-number/LICENSE | 21 - node_modules/is-number/README.md | 187 -- node_modules/is-number/index.js | 18 - node_modules/is-number/package.json | 82 - node_modules/jsonfile/CHANGELOG.md | 171 -- node_modules/jsonfile/LICENSE | 15 - node_modules/jsonfile/README.md | 230 --- node_modules/jsonfile/index.js | 88 - node_modules/jsonfile/package.json | 40 - node_modules/jsonfile/utils.js | 14 - node_modules/lilconfig/LICENSE | 21 - node_modules/lilconfig/package.json | 42 - node_modules/lilconfig/readme.md | 98 -- node_modules/lilconfig/src/index.d.ts | 54 - node_modules/lilconfig/src/index.js | 460 ----- node_modules/merge2/LICENSE | 21 - node_modules/merge2/README.md | 144 -- node_modules/merge2/index.js | 144 -- node_modules/merge2/package.json | 43 - node_modules/micromatch/LICENSE | 21 - node_modules/micromatch/README.md | 1024 ----------- node_modules/micromatch/index.js | 474 ------ node_modules/micromatch/package.json | 119 -- node_modules/nanoid/.devcontainer.json | 23 - node_modules/nanoid/LICENSE | 20 - node_modules/nanoid/README.md | 554 ------ node_modules/nanoid/async/index.browser.cjs | 69 - node_modules/nanoid/async/index.browser.js | 69 - node_modules/nanoid/async/index.cjs | 71 - node_modules/nanoid/async/index.d.ts | 56 - node_modules/nanoid/async/index.js | 71 - node_modules/nanoid/async/index.native.js | 57 - node_modules/nanoid/async/package.json | 12 - node_modules/nanoid/bin/nanoid.cjs | 55 - node_modules/nanoid/index.browser.cjs | 72 - node_modules/nanoid/index.browser.js | 72 - node_modules/nanoid/index.cjs | 85 - node_modules/nanoid/index.d.cts | 91 - node_modules/nanoid/index.d.ts | 91 - node_modules/nanoid/index.js | 85 - node_modules/nanoid/nanoid.js | 1 - node_modules/nanoid/non-secure/index.cjs | 34 - node_modules/nanoid/non-secure/index.d.ts | 33 - node_modules/nanoid/non-secure/index.js | 34 - node_modules/nanoid/non-secure/package.json | 6 - node_modules/nanoid/package.json | 88 - node_modules/nanoid/url-alphabet/index.cjs | 7 - node_modules/nanoid/url-alphabet/index.js | 7 - node_modules/nanoid/url-alphabet/package.json | 6 - node_modules/normalize-path/LICENSE | 21 - node_modules/normalize-path/README.md | 127 -- node_modules/normalize-path/index.js | 35 - node_modules/normalize-path/package.json | 77 - node_modules/path-type/index.d.ts | 51 - node_modules/path-type/index.js | 41 - node_modules/path-type/license | 9 - node_modules/path-type/package.json | 47 - node_modules/path-type/readme.md | 74 - node_modules/picocolors/LICENSE | 15 - node_modules/picocolors/README.md | 21 - node_modules/picocolors/package.json | 25 - node_modules/picocolors/picocolors.browser.js | 4 - node_modules/picocolors/picocolors.d.ts | 5 - node_modules/picocolors/picocolors.js | 75 - node_modules/picocolors/types.d.ts | 51 - node_modules/picomatch/CHANGELOG.md | 136 -- node_modules/picomatch/LICENSE | 21 - node_modules/picomatch/README.md | 708 -------- node_modules/picomatch/index.js | 3 - 
node_modules/picomatch/lib/constants.js | 179 -- node_modules/picomatch/lib/parse.js | 1091 ------------ node_modules/picomatch/lib/picomatch.js | 342 ---- node_modules/picomatch/lib/scan.js | 391 ----- node_modules/picomatch/lib/utils.js | 64 - node_modules/picomatch/package.json | 81 - node_modules/pify/index.js | 68 - node_modules/pify/license | 21 - node_modules/pify/package.json | 48 - node_modules/pify/readme.md | 119 -- node_modules/postcss-cli/LICENSE | 21 - node_modules/postcss-cli/README.md | 149 -- node_modules/postcss-cli/index.js | 354 ---- .../postcss-cli/lib/DependencyGraph.js | 30 - node_modules/postcss-cli/lib/args.js | 115 -- node_modules/postcss-cli/lib/getMapfile.js | 7 - node_modules/postcss-cli/package.json | 78 - node_modules/postcss-load-config/LICENSE | 20 - node_modules/postcss-load-config/README.md | 471 ----- node_modules/postcss-load-config/package.json | 58 - .../postcss-load-config/src/index.d.ts | 65 - node_modules/postcss-load-config/src/index.js | 164 -- .../postcss-load-config/src/options.js | 48 - .../postcss-load-config/src/plugins.js | 90 - node_modules/postcss-load-config/src/req.js | 59 - node_modules/postcss-reporter/LICENSE | 22 - node_modules/postcss-reporter/README.md | 12 - node_modules/postcss-reporter/index.js | 4 - .../postcss-reporter/lib/formatter.js | 93 - node_modules/postcss-reporter/lib/reporter.js | 111 -- node_modules/postcss-reporter/lib/util.js | 31 - node_modules/postcss-reporter/package.json | 37 - node_modules/postcss/LICENSE | 20 - node_modules/postcss/README.md | 28 - node_modules/postcss/lib/at-rule.d.ts | 140 -- node_modules/postcss/lib/at-rule.js | 25 - node_modules/postcss/lib/comment.d.ts | 68 - node_modules/postcss/lib/comment.js | 13 - node_modules/postcss/lib/container.d.ts | 483 ------ node_modules/postcss/lib/container.js | 447 ----- .../postcss/lib/css-syntax-error.d.ts | 248 --- node_modules/postcss/lib/css-syntax-error.js | 133 -- node_modules/postcss/lib/declaration.d.ts | 151 -- node_modules/postcss/lib/declaration.js | 24 - node_modules/postcss/lib/document.d.ts | 69 - node_modules/postcss/lib/document.js | 33 - node_modules/postcss/lib/fromJSON.d.ts | 9 - node_modules/postcss/lib/fromJSON.js | 54 - node_modules/postcss/lib/input.d.ts | 197 --- node_modules/postcss/lib/input.js | 248 --- node_modules/postcss/lib/lazy-result.d.ts | 190 --- node_modules/postcss/lib/lazy-result.js | 550 ------ node_modules/postcss/lib/list.d.ts | 60 - node_modules/postcss/lib/list.js | 58 - node_modules/postcss/lib/map-generator.js | 368 ---- node_modules/postcss/lib/no-work-result.d.ts | 46 - node_modules/postcss/lib/no-work-result.js | 138 -- node_modules/postcss/lib/node.d.ts | 541 ------ node_modules/postcss/lib/node.js | 425 ----- node_modules/postcss/lib/parse.d.ts | 9 - node_modules/postcss/lib/parse.js | 42 - node_modules/postcss/lib/parser.js | 609 ------- node_modules/postcss/lib/postcss.d.mts | 69 - node_modules/postcss/lib/postcss.d.ts | 453 ----- node_modules/postcss/lib/postcss.js | 101 -- node_modules/postcss/lib/postcss.mjs | 30 - node_modules/postcss/lib/previous-map.d.ts | 81 - node_modules/postcss/lib/previous-map.js | 144 -- node_modules/postcss/lib/processor.d.ts | 115 -- node_modules/postcss/lib/processor.js | 67 - node_modules/postcss/lib/result.d.ts | 205 --- node_modules/postcss/lib/result.js | 42 - node_modules/postcss/lib/root.d.ts | 87 - node_modules/postcss/lib/root.js | 61 - node_modules/postcss/lib/rule.d.ts | 126 -- node_modules/postcss/lib/rule.js | 27 - node_modules/postcss/lib/stringifier.d.ts | 
46 - node_modules/postcss/lib/stringifier.js | 353 ---- node_modules/postcss/lib/stringify.d.ts | 9 - node_modules/postcss/lib/stringify.js | 11 - node_modules/postcss/lib/symbols.js | 5 - .../postcss/lib/terminal-highlight.js | 70 - node_modules/postcss/lib/tokenize.js | 266 --- node_modules/postcss/lib/warn-once.js | 13 - node_modules/postcss/lib/warning.d.ts | 147 -- node_modules/postcss/lib/warning.js | 37 - node_modules/postcss/package.json | 88 - node_modules/pretty-hrtime/.jshintignore | 1 - node_modules/pretty-hrtime/.npmignore | 10 - node_modules/pretty-hrtime/LICENSE | 20 - node_modules/pretty-hrtime/README.md | 57 - node_modules/pretty-hrtime/index.js | 80 - node_modules/pretty-hrtime/package.json | 25 - node_modules/queue-microtask/LICENSE | 20 - node_modules/queue-microtask/README.md | 90 - node_modules/queue-microtask/index.d.ts | 2 - node_modules/queue-microtask/index.js | 9 - node_modules/queue-microtask/package.json | 55 - node_modules/read-cache/LICENSE | 20 - node_modules/read-cache/README.md | 46 - node_modules/read-cache/index.js | 78 - node_modules/read-cache/package.json | 34 - node_modules/readdirp/LICENSE | 21 - node_modules/readdirp/README.md | 122 -- node_modules/readdirp/index.d.ts | 43 - node_modules/readdirp/index.js | 287 ---- node_modules/readdirp/package.json | 122 -- node_modules/require-directory/.jshintrc | 67 - node_modules/require-directory/.npmignore | 1 - node_modules/require-directory/.travis.yml | 3 - node_modules/require-directory/LICENSE | 22 - .../require-directory/README.markdown | 184 -- node_modules/require-directory/index.js | 86 - node_modules/require-directory/package.json | 40 - node_modules/reusify/.coveralls.yml | 1 - node_modules/reusify/.travis.yml | 28 - node_modules/reusify/LICENSE | 22 - node_modules/reusify/README.md | 145 -- .../benchmarks/createNoCodeFunction.js | 30 - node_modules/reusify/benchmarks/fib.js | 13 - .../reusify/benchmarks/reuseNoCodeFunction.js | 38 - node_modules/reusify/package.json | 45 - node_modules/reusify/reusify.js | 33 - node_modules/reusify/test.js | 66 - node_modules/run-parallel/LICENSE | 20 - node_modules/run-parallel/README.md | 85 - node_modules/run-parallel/index.js | 51 - node_modules/run-parallel/package.json | 58 - node_modules/slash/index.d.ts | 23 - node_modules/slash/index.js | 9 - node_modules/slash/license | 9 - node_modules/slash/package.json | 39 - node_modules/slash/readme.md | 36 - node_modules/source-map-js/LICENSE | 28 - node_modules/source-map-js/README.md | 765 --------- node_modules/source-map-js/lib/array-set.js | 121 -- node_modules/source-map-js/lib/base64-vlq.js | 140 -- node_modules/source-map-js/lib/base64.js | 67 - .../source-map-js/lib/binary-search.js | 111 -- .../source-map-js/lib/mapping-list.js | 79 - node_modules/source-map-js/lib/quick-sort.js | 132 -- .../lib/source-map-consumer.d.ts | 1 - .../source-map-js/lib/source-map-consumer.js | 1188 ------------- .../lib/source-map-generator.d.ts | 1 - .../source-map-js/lib/source-map-generator.js | 444 ----- .../source-map-js/lib/source-node.d.ts | 1 - node_modules/source-map-js/lib/source-node.js | 413 ----- node_modules/source-map-js/lib/util.js | 594 ------- node_modules/source-map-js/package.json | 71 - node_modules/source-map-js/source-map.d.ts | 104 -- node_modules/source-map-js/source-map.js | 8 - node_modules/string-width/index.d.ts | 29 - node_modules/string-width/index.js | 47 - node_modules/string-width/license | 9 - node_modules/string-width/package.json | 56 - node_modules/string-width/readme.md | 50 - 
node_modules/strip-ansi/index.d.ts | 17 - node_modules/strip-ansi/index.js | 4 - node_modules/strip-ansi/license | 9 - node_modules/strip-ansi/package.json | 54 - node_modules/strip-ansi/readme.md | 46 - node_modules/thenby/LICENSE.TXT | 201 --- node_modules/thenby/README.md | 159 -- node_modules/thenby/package.json | 45 - node_modules/thenby/thenBy.min.js | 2 - node_modules/thenby/thenBy.module.d.ts | 51 - node_modules/thenby/thenBy.module.js | 60 - node_modules/to-regex-range/LICENSE | 21 - node_modules/to-regex-range/README.md | 305 ---- node_modules/to-regex-range/index.js | 288 ---- node_modules/to-regex-range/package.json | 88 - node_modules/unicorn-magic/default.js | 14 - node_modules/unicorn-magic/index.d.ts | 29 - node_modules/unicorn-magic/license | 9 - node_modules/unicorn-magic/node.js | 7 - node_modules/unicorn-magic/package.json | 49 - node_modules/unicorn-magic/readme.md | 25 - node_modules/universalify/LICENSE | 20 - node_modules/universalify/README.md | 76 - node_modules/universalify/index.js | 24 - node_modules/universalify/package.json | 34 - node_modules/wrap-ansi/index.js | 216 --- node_modules/wrap-ansi/license | 9 - node_modules/wrap-ansi/package.json | 62 - node_modules/wrap-ansi/readme.md | 91 - node_modules/y18n/CHANGELOG.md | 100 -- node_modules/y18n/LICENSE | 13 - node_modules/y18n/README.md | 127 -- node_modules/y18n/build/index.cjs | 203 --- node_modules/y18n/build/lib/cjs.js | 6 - node_modules/y18n/build/lib/index.js | 174 -- .../y18n/build/lib/platform-shims/node.js | 19 - node_modules/y18n/index.mjs | 8 - node_modules/y18n/package.json | 70 - node_modules/yaml/LICENSE | 13 - node_modules/yaml/README.md | 169 -- node_modules/yaml/bin.mjs | 11 - .../dist/compose/compose-collection.js | 88 - .../yaml/browser/dist/compose/compose-doc.js | 43 - .../yaml/browser/dist/compose/compose-node.js | 102 -- .../browser/dist/compose/compose-scalar.js | 86 - .../yaml/browser/dist/compose/composer.js | 217 --- .../browser/dist/compose/resolve-block-map.js | 115 -- .../dist/compose/resolve-block-scalar.js | 198 --- .../browser/dist/compose/resolve-block-seq.js | 49 - .../yaml/browser/dist/compose/resolve-end.js | 37 - .../dist/compose/resolve-flow-collection.js | 207 --- .../dist/compose/resolve-flow-scalar.js | 223 --- .../browser/dist/compose/resolve-props.js | 148 -- .../dist/compose/util-contains-newline.js | 34 - .../compose/util-empty-scalar-position.js | 27 - .../dist/compose/util-flow-indent-check.js | 15 - .../browser/dist/compose/util-map-includes.js | 13 - .../yaml/browser/dist/doc/Document.js | 335 ---- node_modules/yaml/browser/dist/doc/anchors.js | 72 - .../yaml/browser/dist/doc/applyReviver.js | 55 - .../yaml/browser/dist/doc/createNode.js | 89 - .../yaml/browser/dist/doc/directives.js | 176 -- node_modules/yaml/browser/dist/errors.js | 57 - node_modules/yaml/browser/dist/index.js | 17 - node_modules/yaml/browser/dist/log.js | 14 - node_modules/yaml/browser/dist/nodes/Alias.js | 101 -- .../yaml/browser/dist/nodes/Collection.js | 147 -- node_modules/yaml/browser/dist/nodes/Node.js | 38 - node_modules/yaml/browser/dist/nodes/Pair.js | 36 - .../yaml/browser/dist/nodes/Scalar.js | 24 - .../yaml/browser/dist/nodes/YAMLMap.js | 144 -- .../yaml/browser/dist/nodes/YAMLSeq.js | 113 -- .../yaml/browser/dist/nodes/addPairToJSMap.js | 62 - .../yaml/browser/dist/nodes/identity.js | 36 - node_modules/yaml/browser/dist/nodes/toJS.js | 37 - .../yaml/browser/dist/parse/cst-scalar.js | 214 --- .../yaml/browser/dist/parse/cst-stringify.js | 61 - 
.../yaml/browser/dist/parse/cst-visit.js | 97 -- node_modules/yaml/browser/dist/parse/cst.js | 98 -- node_modules/yaml/browser/dist/parse/lexer.js | 717 -------- .../yaml/browser/dist/parse/line-counter.js | 39 - .../yaml/browser/dist/parse/parser.js | 954 ----------- node_modules/yaml/browser/dist/public-api.js | 102 -- .../yaml/browser/dist/schema/Schema.js | 37 - .../yaml/browser/dist/schema/common/map.js | 17 - .../yaml/browser/dist/schema/common/null.js | 15 - .../yaml/browser/dist/schema/common/seq.js | 17 - .../yaml/browser/dist/schema/common/string.js | 14 - .../yaml/browser/dist/schema/core/bool.js | 19 - .../yaml/browser/dist/schema/core/float.js | 43 - .../yaml/browser/dist/schema/core/int.js | 38 - .../yaml/browser/dist/schema/core/schema.js | 23 - .../yaml/browser/dist/schema/json/schema.js | 62 - node_modules/yaml/browser/dist/schema/tags.js | 96 -- .../browser/dist/schema/yaml-1.1/binary.js | 66 - .../yaml/browser/dist/schema/yaml-1.1/bool.js | 26 - .../browser/dist/schema/yaml-1.1/float.js | 46 - .../yaml/browser/dist/schema/yaml-1.1/int.js | 71 - .../browser/dist/schema/yaml-1.1/merge.js | 64 - .../yaml/browser/dist/schema/yaml-1.1/omap.js | 74 - .../browser/dist/schema/yaml-1.1/pairs.js | 78 - .../browser/dist/schema/yaml-1.1/schema.js | 39 - .../yaml/browser/dist/schema/yaml-1.1/set.js | 93 - .../browser/dist/schema/yaml-1.1/timestamp.js | 101 -- .../browser/dist/stringify/foldFlowLines.js | 146 -- .../yaml/browser/dist/stringify/stringify.js | 129 -- .../dist/stringify/stringifyCollection.js | 143 -- .../dist/stringify/stringifyComment.js | 20 - .../dist/stringify/stringifyDocument.js | 85 - .../browser/dist/stringify/stringifyNumber.js | 24 - .../browser/dist/stringify/stringifyPair.js | 150 -- .../browser/dist/stringify/stringifyString.js | 337 ---- node_modules/yaml/browser/dist/util.js | 11 - node_modules/yaml/browser/dist/visit.js | 233 --- node_modules/yaml/browser/index.js | 5 - node_modules/yaml/browser/package.json | 3 - node_modules/yaml/dist/cli.d.ts | 8 - node_modules/yaml/dist/cli.mjs | 199 --- .../yaml/dist/compose/compose-collection.d.ts | 11 - .../yaml/dist/compose/compose-collection.js | 90 - .../yaml/dist/compose/compose-doc.d.ts | 7 - node_modules/yaml/dist/compose/compose-doc.js | 45 - .../yaml/dist/compose/compose-node.d.ts | 29 - .../yaml/dist/compose/compose-node.js | 105 -- .../yaml/dist/compose/compose-scalar.d.ts | 5 - .../yaml/dist/compose/compose-scalar.js | 88 - node_modules/yaml/dist/compose/composer.d.ts | 62 - node_modules/yaml/dist/compose/composer.js | 221 --- .../yaml/dist/compose/resolve-block-map.d.ts | 6 - .../yaml/dist/compose/resolve-block-map.js | 117 -- .../dist/compose/resolve-block-scalar.d.ts | 11 - .../yaml/dist/compose/resolve-block-scalar.js | 200 --- .../yaml/dist/compose/resolve-block-seq.d.ts | 6 - .../yaml/dist/compose/resolve-block-seq.js | 51 - .../yaml/dist/compose/resolve-end.d.ts | 6 - node_modules/yaml/dist/compose/resolve-end.js | 39 - .../dist/compose/resolve-flow-collection.d.ts | 7 - .../dist/compose/resolve-flow-collection.js | 209 --- .../dist/compose/resolve-flow-scalar.d.ts | 10 - .../yaml/dist/compose/resolve-flow-scalar.js | 225 --- .../yaml/dist/compose/resolve-props.d.ts | 23 - .../yaml/dist/compose/resolve-props.js | 150 -- .../dist/compose/util-contains-newline.d.ts | 2 - .../dist/compose/util-contains-newline.js | 36 - .../compose/util-empty-scalar-position.d.ts | 2 - .../compose/util-empty-scalar-position.js | 29 - .../dist/compose/util-flow-indent-check.d.ts | 3 - 
.../dist/compose/util-flow-indent-check.js | 17 - .../yaml/dist/compose/util-map-includes.d.ts | 4 - .../yaml/dist/compose/util-map-includes.js | 15 - node_modules/yaml/dist/doc/Document.d.ts | 141 -- node_modules/yaml/dist/doc/Document.js | 337 ---- node_modules/yaml/dist/doc/anchors.d.ts | 24 - node_modules/yaml/dist/doc/anchors.js | 77 - node_modules/yaml/dist/doc/applyReviver.d.ts | 9 - node_modules/yaml/dist/doc/applyReviver.js | 57 - node_modules/yaml/dist/doc/createNode.d.ts | 17 - node_modules/yaml/dist/doc/createNode.js | 91 - node_modules/yaml/dist/doc/directives.d.ts | 49 - node_modules/yaml/dist/doc/directives.js | 178 -- node_modules/yaml/dist/errors.d.ts | 21 - node_modules/yaml/dist/errors.js | 62 - node_modules/yaml/dist/index.d.ts | 22 - node_modules/yaml/dist/index.js | 50 - node_modules/yaml/dist/log.d.ts | 3 - node_modules/yaml/dist/log.js | 17 - node_modules/yaml/dist/nodes/Alias.d.ts | 28 - node_modules/yaml/dist/nodes/Alias.js | 103 -- node_modules/yaml/dist/nodes/Collection.d.ts | 73 - node_modules/yaml/dist/nodes/Collection.js | 151 -- node_modules/yaml/dist/nodes/Node.d.ts | 53 - node_modules/yaml/dist/nodes/Node.js | 40 - node_modules/yaml/dist/nodes/Pair.d.ts | 21 - node_modules/yaml/dist/nodes/Pair.js | 39 - node_modules/yaml/dist/nodes/Scalar.d.ts | 42 - node_modules/yaml/dist/nodes/Scalar.js | 27 - node_modules/yaml/dist/nodes/YAMLMap.d.ts | 53 - node_modules/yaml/dist/nodes/YAMLMap.js | 147 -- node_modules/yaml/dist/nodes/YAMLSeq.d.ts | 60 - node_modules/yaml/dist/nodes/YAMLSeq.js | 115 -- .../yaml/dist/nodes/addPairToJSMap.d.ts | 4 - .../yaml/dist/nodes/addPairToJSMap.js | 64 - node_modules/yaml/dist/nodes/identity.d.ts | 23 - node_modules/yaml/dist/nodes/identity.js | 53 - node_modules/yaml/dist/nodes/toJS.d.ts | 27 - node_modules/yaml/dist/nodes/toJS.js | 39 - node_modules/yaml/dist/options.d.ts | 344 ---- node_modules/yaml/dist/parse/cst-scalar.d.ts | 64 - node_modules/yaml/dist/parse/cst-scalar.js | 218 --- .../yaml/dist/parse/cst-stringify.d.ts | 8 - node_modules/yaml/dist/parse/cst-stringify.js | 63 - node_modules/yaml/dist/parse/cst-visit.d.ts | 39 - node_modules/yaml/dist/parse/cst-visit.js | 99 -- node_modules/yaml/dist/parse/cst.d.ts | 108 -- node_modules/yaml/dist/parse/cst.js | 112 -- node_modules/yaml/dist/parse/lexer.d.ts | 87 - node_modules/yaml/dist/parse/lexer.js | 719 -------- .../yaml/dist/parse/line-counter.d.ts | 22 - node_modules/yaml/dist/parse/line-counter.js | 41 - node_modules/yaml/dist/parse/parser.d.ts | 84 - node_modules/yaml/dist/parse/parser.js | 958 ----------- node_modules/yaml/dist/public-api.d.ts | 43 - node_modules/yaml/dist/public-api.js | 107 -- node_modules/yaml/dist/schema/Schema.d.ts | 17 - node_modules/yaml/dist/schema/Schema.js | 39 - node_modules/yaml/dist/schema/common/map.d.ts | 2 - node_modules/yaml/dist/schema/common/map.js | 19 - .../yaml/dist/schema/common/null.d.ts | 4 - node_modules/yaml/dist/schema/common/null.js | 17 - node_modules/yaml/dist/schema/common/seq.d.ts | 2 - node_modules/yaml/dist/schema/common/seq.js | 19 - .../yaml/dist/schema/common/string.d.ts | 2 - .../yaml/dist/schema/common/string.js | 16 - node_modules/yaml/dist/schema/core/bool.d.ts | 4 - node_modules/yaml/dist/schema/core/bool.js | 21 - node_modules/yaml/dist/schema/core/float.d.ts | 4 - node_modules/yaml/dist/schema/core/float.js | 47 - node_modules/yaml/dist/schema/core/int.d.ts | 4 - node_modules/yaml/dist/schema/core/int.js | 42 - .../yaml/dist/schema/core/schema.d.ts | 1 - node_modules/yaml/dist/schema/core/schema.js | 25 - 
.../yaml/dist/schema/json-schema.d.ts | 69 - .../yaml/dist/schema/json/schema.d.ts | 2 - node_modules/yaml/dist/schema/json/schema.js | 64 - node_modules/yaml/dist/schema/tags.d.ts | 48 - node_modules/yaml/dist/schema/tags.js | 99 -- node_modules/yaml/dist/schema/types.d.ts | 92 - .../yaml/dist/schema/yaml-1.1/binary.d.ts | 2 - .../yaml/dist/schema/yaml-1.1/binary.js | 68 - .../yaml/dist/schema/yaml-1.1/bool.d.ts | 7 - .../yaml/dist/schema/yaml-1.1/bool.js | 29 - .../yaml/dist/schema/yaml-1.1/float.d.ts | 4 - .../yaml/dist/schema/yaml-1.1/float.js | 50 - .../yaml/dist/schema/yaml-1.1/int.d.ts | 5 - node_modules/yaml/dist/schema/yaml-1.1/int.js | 76 - .../yaml/dist/schema/yaml-1.1/merge.d.ts | 9 - .../yaml/dist/schema/yaml-1.1/merge.js | 68 - .../yaml/dist/schema/yaml-1.1/omap.d.ts | 28 - .../yaml/dist/schema/yaml-1.1/omap.js | 77 - .../yaml/dist/schema/yaml-1.1/pairs.d.ts | 10 - .../yaml/dist/schema/yaml-1.1/pairs.js | 82 - .../yaml/dist/schema/yaml-1.1/schema.d.ts | 1 - .../yaml/dist/schema/yaml-1.1/schema.js | 41 - .../yaml/dist/schema/yaml-1.1/set.d.ts | 28 - node_modules/yaml/dist/schema/yaml-1.1/set.js | 96 -- .../yaml/dist/schema/yaml-1.1/timestamp.d.ts | 6 - .../yaml/dist/schema/yaml-1.1/timestamp.js | 105 -- .../yaml/dist/stringify/foldFlowLines.d.ts | 34 - .../yaml/dist/stringify/foldFlowLines.js | 151 -- .../yaml/dist/stringify/stringify.d.ts | 21 - node_modules/yaml/dist/stringify/stringify.js | 132 -- .../dist/stringify/stringifyCollection.d.ts | 17 - .../dist/stringify/stringifyCollection.js | 145 -- .../yaml/dist/stringify/stringifyComment.d.ts | 10 - .../yaml/dist/stringify/stringifyComment.js | 24 - .../dist/stringify/stringifyDocument.d.ts | 4 - .../yaml/dist/stringify/stringifyDocument.js | 87 - .../yaml/dist/stringify/stringifyNumber.d.ts | 2 - .../yaml/dist/stringify/stringifyNumber.js | 26 - .../yaml/dist/stringify/stringifyPair.d.ts | 3 - .../yaml/dist/stringify/stringifyPair.js | 152 -- .../yaml/dist/stringify/stringifyString.d.ts | 9 - .../yaml/dist/stringify/stringifyString.js | 339 ---- node_modules/yaml/dist/test-events.d.ts | 4 - node_modules/yaml/dist/test-events.js | 134 -- node_modules/yaml/dist/util.d.ts | 12 - node_modules/yaml/dist/util.js | 28 - node_modules/yaml/dist/visit.d.ts | 102 -- node_modules/yaml/dist/visit.js | 236 --- node_modules/yaml/package.json | 96 -- node_modules/yaml/util.js | 2 - node_modules/yargs-parser/CHANGELOG.md | 308 ---- node_modules/yargs-parser/LICENSE.txt | 14 - node_modules/yargs-parser/README.md | 518 ------ node_modules/yargs-parser/browser.js | 29 - node_modules/yargs-parser/build/index.cjs | 1050 ------------ node_modules/yargs-parser/build/lib/index.js | 62 - .../yargs-parser/build/lib/string-utils.js | 65 - .../build/lib/tokenize-arg-string.js | 40 - .../build/lib/yargs-parser-types.js | 12 - .../yargs-parser/build/lib/yargs-parser.js | 1045 ------------ node_modules/yargs-parser/package.json | 92 - node_modules/yargs/LICENSE | 21 - node_modules/yargs/README.md | 204 --- node_modules/yargs/browser.d.ts | 5 - node_modules/yargs/browser.mjs | 7 - node_modules/yargs/build/index.cjs | 1 - node_modules/yargs/build/lib/argsert.js | 62 - node_modules/yargs/build/lib/command.js | 449 ----- .../yargs/build/lib/completion-templates.js | 48 - node_modules/yargs/build/lib/completion.js | 243 --- node_modules/yargs/build/lib/middleware.js | 88 - node_modules/yargs/build/lib/parse-command.js | 32 - .../yargs/build/lib/typings/common-types.js | 9 - .../build/lib/typings/yargs-parser-types.js | 1 - 
node_modules/yargs/build/lib/usage.js | 584 ------- .../yargs/build/lib/utils/apply-extends.js | 59 - .../yargs/build/lib/utils/is-promise.js | 5 - .../yargs/build/lib/utils/levenshtein.js | 34 - .../build/lib/utils/maybe-async-result.js | 17 - .../yargs/build/lib/utils/obj-filter.js | 10 - .../yargs/build/lib/utils/process-argv.js | 17 - .../yargs/build/lib/utils/set-blocking.js | 12 - .../yargs/build/lib/utils/which-module.js | 10 - node_modules/yargs/build/lib/validation.js | 305 ---- node_modules/yargs/build/lib/yargs-factory.js | 1512 ----------------- node_modules/yargs/build/lib/yerror.js | 9 - node_modules/yargs/helpers/helpers.mjs | 10 - node_modules/yargs/helpers/index.js | 14 - node_modules/yargs/helpers/package.json | 3 - node_modules/yargs/index.cjs | 53 - node_modules/yargs/index.mjs | 8 - .../yargs/lib/platform-shims/browser.mjs | 95 -- node_modules/yargs/lib/platform-shims/esm.mjs | 73 - node_modules/yargs/locales/be.json | 46 - node_modules/yargs/locales/cs.json | 51 - node_modules/yargs/locales/de.json | 46 - node_modules/yargs/locales/en.json | 55 - node_modules/yargs/locales/es.json | 46 - node_modules/yargs/locales/fi.json | 49 - node_modules/yargs/locales/fr.json | 53 - node_modules/yargs/locales/hi.json | 49 - node_modules/yargs/locales/hu.json | 46 - node_modules/yargs/locales/id.json | 50 - node_modules/yargs/locales/it.json | 46 - node_modules/yargs/locales/ja.json | 51 - node_modules/yargs/locales/ko.json | 49 - node_modules/yargs/locales/nb.json | 44 - node_modules/yargs/locales/nl.json | 49 - node_modules/yargs/locales/nn.json | 44 - node_modules/yargs/locales/pirate.json | 13 - node_modules/yargs/locales/pl.json | 49 - node_modules/yargs/locales/pt.json | 45 - node_modules/yargs/locales/pt_BR.json | 48 - node_modules/yargs/locales/ru.json | 51 - node_modules/yargs/locales/th.json | 46 - node_modules/yargs/locales/tr.json | 48 - node_modules/yargs/locales/uk_UA.json | 51 - node_modules/yargs/locales/uz.json | 52 - node_modules/yargs/locales/zh_CN.json | 48 - node_modules/yargs/locales/zh_TW.json | 51 - node_modules/yargs/package.json | 123 -- node_modules/yargs/yargs | 9 - node_modules/yargs/yargs.mjs | 10 - 885 files changed, 81687 deletions(-) delete mode 120000 node_modules/.bin/nanoid delete mode 120000 node_modules/.bin/postcss delete mode 120000 node_modules/.bin/yaml delete mode 100644 node_modules/.package-lock.json delete mode 100644 node_modules/@nodelib/fs.scandir/LICENSE delete mode 100644 node_modules/@nodelib/fs.scandir/README.md delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/constants.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/constants.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.js delete mode 100644 
node_modules/@nodelib/fs.scandir/out/types/index.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/types/index.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/index.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/index.js delete mode 100644 node_modules/@nodelib/fs.scandir/package.json delete mode 100644 node_modules/@nodelib/fs.stat/LICENSE delete mode 100644 node_modules/@nodelib/fs.stat/README.md delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.js delete mode 100644 node_modules/@nodelib/fs.stat/out/index.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/index.js delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.js delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.js delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.js delete mode 100644 node_modules/@nodelib/fs.stat/out/types/index.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/types/index.js delete mode 100644 node_modules/@nodelib/fs.stat/package.json delete mode 100644 node_modules/@nodelib/fs.walk/LICENSE delete mode 100644 node_modules/@nodelib/fs.walk/README.md delete mode 100644 node_modules/@nodelib/fs.walk/out/index.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/index.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/index.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/index.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.js delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.js delete mode 100644 node_modules/@nodelib/fs.walk/out/types/index.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/types/index.js delete mode 100644 node_modules/@nodelib/fs.walk/package.json delete mode 100644 node_modules/@sindresorhus/merge-streams/index.d.ts delete mode 100644 node_modules/@sindresorhus/merge-streams/index.js delete mode 100644 node_modules/@sindresorhus/merge-streams/license delete mode 100644 node_modules/@sindresorhus/merge-streams/package.json delete mode 100644 node_modules/@sindresorhus/merge-streams/readme.md 
delete mode 100644 node_modules/ansi-regex/index.d.ts delete mode 100644 node_modules/ansi-regex/index.js delete mode 100644 node_modules/ansi-regex/license delete mode 100644 node_modules/ansi-regex/package.json delete mode 100644 node_modules/ansi-regex/readme.md delete mode 100644 node_modules/ansi-styles/index.d.ts delete mode 100644 node_modules/ansi-styles/index.js delete mode 100644 node_modules/ansi-styles/license delete mode 100644 node_modules/ansi-styles/package.json delete mode 100644 node_modules/ansi-styles/readme.md delete mode 100644 node_modules/anymatch/LICENSE delete mode 100644 node_modules/anymatch/README.md delete mode 100644 node_modules/anymatch/index.d.ts delete mode 100644 node_modules/anymatch/index.js delete mode 100644 node_modules/anymatch/package.json delete mode 100644 node_modules/binary-extensions/binary-extensions.json delete mode 100644 node_modules/binary-extensions/binary-extensions.json.d.ts delete mode 100644 node_modules/binary-extensions/index.d.ts delete mode 100644 node_modules/binary-extensions/index.js delete mode 100644 node_modules/binary-extensions/license delete mode 100644 node_modules/binary-extensions/package.json delete mode 100644 node_modules/binary-extensions/readme.md delete mode 100644 node_modules/braces/LICENSE delete mode 100644 node_modules/braces/README.md delete mode 100644 node_modules/braces/index.js delete mode 100644 node_modules/braces/lib/compile.js delete mode 100644 node_modules/braces/lib/constants.js delete mode 100644 node_modules/braces/lib/expand.js delete mode 100644 node_modules/braces/lib/parse.js delete mode 100644 node_modules/braces/lib/stringify.js delete mode 100644 node_modules/braces/lib/utils.js delete mode 100644 node_modules/braces/package.json delete mode 100644 node_modules/chokidar/LICENSE delete mode 100644 node_modules/chokidar/README.md delete mode 100644 node_modules/chokidar/index.js delete mode 100644 node_modules/chokidar/lib/constants.js delete mode 100644 node_modules/chokidar/lib/fsevents-handler.js delete mode 100644 node_modules/chokidar/lib/nodefs-handler.js delete mode 100644 node_modules/chokidar/package.json delete mode 100644 node_modules/chokidar/types/index.d.ts delete mode 100644 node_modules/cliui/CHANGELOG.md delete mode 100644 node_modules/cliui/LICENSE.txt delete mode 100644 node_modules/cliui/README.md delete mode 100644 node_modules/cliui/build/index.cjs delete mode 100644 node_modules/cliui/build/index.d.cts delete mode 100644 node_modules/cliui/build/lib/index.js delete mode 100644 node_modules/cliui/build/lib/string-utils.js delete mode 100644 node_modules/cliui/index.mjs delete mode 100644 node_modules/cliui/package.json delete mode 100644 node_modules/color-convert/CHANGELOG.md delete mode 100644 node_modules/color-convert/LICENSE delete mode 100644 node_modules/color-convert/README.md delete mode 100644 node_modules/color-convert/conversions.js delete mode 100644 node_modules/color-convert/index.js delete mode 100644 node_modules/color-convert/package.json delete mode 100644 node_modules/color-convert/route.js delete mode 100644 node_modules/color-name/LICENSE delete mode 100644 node_modules/color-name/README.md delete mode 100644 node_modules/color-name/index.js delete mode 100644 node_modules/color-name/package.json delete mode 100755 node_modules/dependency-graph/CHANGELOG.md delete mode 100755 node_modules/dependency-graph/LICENSE delete mode 100755 node_modules/dependency-graph/README.md delete mode 100755 node_modules/dependency-graph/lib/dep_graph.js delete 
mode 100755 node_modules/dependency-graph/lib/index.d.ts delete mode 100755 node_modules/dependency-graph/package.json delete mode 100755 node_modules/dependency-graph/specs/dep_graph_spec.js delete mode 100644 node_modules/emoji-regex/LICENSE-MIT.txt delete mode 100644 node_modules/emoji-regex/README.md delete mode 100644 node_modules/emoji-regex/es2015/index.js delete mode 100644 node_modules/emoji-regex/es2015/text.js delete mode 100644 node_modules/emoji-regex/index.d.ts delete mode 100644 node_modules/emoji-regex/index.js delete mode 100644 node_modules/emoji-regex/package.json delete mode 100644 node_modules/emoji-regex/text.js delete mode 100644 node_modules/escalade/dist/index.js delete mode 100644 node_modules/escalade/dist/index.mjs delete mode 100644 node_modules/escalade/index.d.mts delete mode 100644 node_modules/escalade/index.d.ts delete mode 100644 node_modules/escalade/license delete mode 100644 node_modules/escalade/package.json delete mode 100644 node_modules/escalade/readme.md delete mode 100644 node_modules/escalade/sync/index.d.mts delete mode 100644 node_modules/escalade/sync/index.d.ts delete mode 100644 node_modules/escalade/sync/index.js delete mode 100644 node_modules/escalade/sync/index.mjs delete mode 100644 node_modules/fast-glob/LICENSE delete mode 100644 node_modules/fast-glob/README.md delete mode 100644 node_modules/fast-glob/out/index.d.ts delete mode 100644 node_modules/fast-glob/out/index.js delete mode 100644 node_modules/fast-glob/out/managers/tasks.d.ts delete mode 100644 node_modules/fast-glob/out/managers/tasks.js delete mode 100644 node_modules/fast-glob/out/providers/async.d.ts delete mode 100644 node_modules/fast-glob/out/providers/async.js delete mode 100644 node_modules/fast-glob/out/providers/filters/deep.d.ts delete mode 100644 node_modules/fast-glob/out/providers/filters/deep.js delete mode 100644 node_modules/fast-glob/out/providers/filters/entry.d.ts delete mode 100644 node_modules/fast-glob/out/providers/filters/entry.js delete mode 100644 node_modules/fast-glob/out/providers/filters/error.d.ts delete mode 100644 node_modules/fast-glob/out/providers/filters/error.js delete mode 100644 node_modules/fast-glob/out/providers/matchers/matcher.d.ts delete mode 100644 node_modules/fast-glob/out/providers/matchers/matcher.js delete mode 100644 node_modules/fast-glob/out/providers/matchers/partial.d.ts delete mode 100644 node_modules/fast-glob/out/providers/matchers/partial.js delete mode 100644 node_modules/fast-glob/out/providers/provider.d.ts delete mode 100644 node_modules/fast-glob/out/providers/provider.js delete mode 100644 node_modules/fast-glob/out/providers/stream.d.ts delete mode 100644 node_modules/fast-glob/out/providers/stream.js delete mode 100644 node_modules/fast-glob/out/providers/sync.d.ts delete mode 100644 node_modules/fast-glob/out/providers/sync.js delete mode 100644 node_modules/fast-glob/out/providers/transformers/entry.d.ts delete mode 100644 node_modules/fast-glob/out/providers/transformers/entry.js delete mode 100644 node_modules/fast-glob/out/readers/async.d.ts delete mode 100644 node_modules/fast-glob/out/readers/async.js delete mode 100644 node_modules/fast-glob/out/readers/reader.d.ts delete mode 100644 node_modules/fast-glob/out/readers/reader.js delete mode 100644 node_modules/fast-glob/out/readers/stream.d.ts delete mode 100644 node_modules/fast-glob/out/readers/stream.js delete mode 100644 node_modules/fast-glob/out/readers/sync.d.ts delete mode 100644 node_modules/fast-glob/out/readers/sync.js delete mode 
100644 node_modules/fast-glob/out/settings.d.ts delete mode 100644 node_modules/fast-glob/out/settings.js delete mode 100644 node_modules/fast-glob/out/types/index.d.ts delete mode 100644 node_modules/fast-glob/out/types/index.js delete mode 100644 node_modules/fast-glob/out/utils/array.d.ts delete mode 100644 node_modules/fast-glob/out/utils/array.js delete mode 100644 node_modules/fast-glob/out/utils/errno.d.ts delete mode 100644 node_modules/fast-glob/out/utils/errno.js delete mode 100644 node_modules/fast-glob/out/utils/fs.d.ts delete mode 100644 node_modules/fast-glob/out/utils/fs.js delete mode 100644 node_modules/fast-glob/out/utils/index.d.ts delete mode 100644 node_modules/fast-glob/out/utils/index.js delete mode 100644 node_modules/fast-glob/out/utils/path.d.ts delete mode 100644 node_modules/fast-glob/out/utils/path.js delete mode 100644 node_modules/fast-glob/out/utils/pattern.d.ts delete mode 100644 node_modules/fast-glob/out/utils/pattern.js delete mode 100644 node_modules/fast-glob/out/utils/stream.d.ts delete mode 100644 node_modules/fast-glob/out/utils/stream.js delete mode 100644 node_modules/fast-glob/out/utils/string.d.ts delete mode 100644 node_modules/fast-glob/out/utils/string.js delete mode 100644 node_modules/fast-glob/package.json delete mode 100644 node_modules/fastq/.github/dependabot.yml delete mode 100644 node_modules/fastq/.github/workflows/ci.yml delete mode 100644 node_modules/fastq/LICENSE delete mode 100644 node_modules/fastq/README.md delete mode 100644 node_modules/fastq/bench.js delete mode 100644 node_modules/fastq/example.js delete mode 100644 node_modules/fastq/example.mjs delete mode 100644 node_modules/fastq/index.d.ts delete mode 100644 node_modules/fastq/package.json delete mode 100644 node_modules/fastq/queue.js delete mode 100644 node_modules/fastq/test/example.ts delete mode 100644 node_modules/fastq/test/promise.js delete mode 100644 node_modules/fastq/test/test.js delete mode 100644 node_modules/fastq/test/tsconfig.json delete mode 100644 node_modules/fill-range/LICENSE delete mode 100644 node_modules/fill-range/README.md delete mode 100644 node_modules/fill-range/index.js delete mode 100644 node_modules/fill-range/package.json delete mode 100644 node_modules/fs-extra/LICENSE delete mode 100644 node_modules/fs-extra/README.md delete mode 100644 node_modules/fs-extra/lib/copy/copy-sync.js delete mode 100644 node_modules/fs-extra/lib/copy/copy.js delete mode 100644 node_modules/fs-extra/lib/copy/index.js delete mode 100644 node_modules/fs-extra/lib/empty/index.js delete mode 100644 node_modules/fs-extra/lib/ensure/file.js delete mode 100644 node_modules/fs-extra/lib/ensure/index.js delete mode 100644 node_modules/fs-extra/lib/ensure/link.js delete mode 100644 node_modules/fs-extra/lib/ensure/symlink-paths.js delete mode 100644 node_modules/fs-extra/lib/ensure/symlink-type.js delete mode 100644 node_modules/fs-extra/lib/ensure/symlink.js delete mode 100644 node_modules/fs-extra/lib/esm.mjs delete mode 100644 node_modules/fs-extra/lib/fs/index.js delete mode 100644 node_modules/fs-extra/lib/index.js delete mode 100644 node_modules/fs-extra/lib/json/index.js delete mode 100644 node_modules/fs-extra/lib/json/jsonfile.js delete mode 100644 node_modules/fs-extra/lib/json/output-json-sync.js delete mode 100644 node_modules/fs-extra/lib/json/output-json.js delete mode 100644 node_modules/fs-extra/lib/mkdirs/index.js delete mode 100644 node_modules/fs-extra/lib/mkdirs/make-dir.js delete mode 100644 node_modules/fs-extra/lib/mkdirs/utils.js delete mode 
100644 node_modules/fs-extra/lib/move/index.js delete mode 100644 node_modules/fs-extra/lib/move/move-sync.js delete mode 100644 node_modules/fs-extra/lib/move/move.js delete mode 100644 node_modules/fs-extra/lib/output-file/index.js delete mode 100644 node_modules/fs-extra/lib/path-exists/index.js delete mode 100644 node_modules/fs-extra/lib/remove/index.js delete mode 100644 node_modules/fs-extra/lib/util/stat.js delete mode 100644 node_modules/fs-extra/lib/util/utimes.js delete mode 100644 node_modules/fs-extra/package.json delete mode 100644 node_modules/get-caller-file/LICENSE.md delete mode 100644 node_modules/get-caller-file/README.md delete mode 100644 node_modules/get-caller-file/index.d.ts delete mode 100644 node_modules/get-caller-file/index.js delete mode 100644 node_modules/get-caller-file/index.js.map delete mode 100644 node_modules/get-caller-file/package.json delete mode 100644 node_modules/get-stdin/index.d.ts delete mode 100644 node_modules/get-stdin/index.js delete mode 100644 node_modules/get-stdin/license delete mode 100644 node_modules/get-stdin/package.json delete mode 100644 node_modules/get-stdin/readme.md delete mode 100644 node_modules/glob-parent/CHANGELOG.md delete mode 100644 node_modules/glob-parent/LICENSE delete mode 100644 node_modules/glob-parent/README.md delete mode 100644 node_modules/glob-parent/index.js delete mode 100644 node_modules/glob-parent/package.json delete mode 100644 node_modules/globby/ignore.js delete mode 100644 node_modules/globby/index.d.ts delete mode 100644 node_modules/globby/index.js delete mode 100644 node_modules/globby/license delete mode 100644 node_modules/globby/package.json delete mode 100644 node_modules/globby/readme.md delete mode 100644 node_modules/globby/utilities.js delete mode 100644 node_modules/graceful-fs/LICENSE delete mode 100644 node_modules/graceful-fs/README.md delete mode 100644 node_modules/graceful-fs/clone.js delete mode 100644 node_modules/graceful-fs/graceful-fs.js delete mode 100644 node_modules/graceful-fs/legacy-streams.js delete mode 100644 node_modules/graceful-fs/package.json delete mode 100644 node_modules/graceful-fs/polyfills.js delete mode 100644 node_modules/ignore/LICENSE-MIT delete mode 100644 node_modules/ignore/README.md delete mode 100644 node_modules/ignore/index.d.ts delete mode 100644 node_modules/ignore/index.js delete mode 100644 node_modules/ignore/legacy.js delete mode 100644 node_modules/ignore/package.json delete mode 100644 node_modules/is-binary-path/index.d.ts delete mode 100644 node_modules/is-binary-path/index.js delete mode 100644 node_modules/is-binary-path/license delete mode 100644 node_modules/is-binary-path/package.json delete mode 100644 node_modules/is-binary-path/readme.md delete mode 100644 node_modules/is-extglob/LICENSE delete mode 100644 node_modules/is-extglob/README.md delete mode 100644 node_modules/is-extglob/index.js delete mode 100644 node_modules/is-extglob/package.json delete mode 100644 node_modules/is-fullwidth-code-point/index.d.ts delete mode 100644 node_modules/is-fullwidth-code-point/index.js delete mode 100644 node_modules/is-fullwidth-code-point/license delete mode 100644 node_modules/is-fullwidth-code-point/package.json delete mode 100644 node_modules/is-fullwidth-code-point/readme.md delete mode 100644 node_modules/is-glob/LICENSE delete mode 100644 node_modules/is-glob/README.md delete mode 100644 node_modules/is-glob/index.js delete mode 100644 node_modules/is-glob/package.json delete mode 100644 node_modules/is-number/LICENSE delete mode 
100644 node_modules/is-number/README.md delete mode 100644 node_modules/is-number/index.js delete mode 100644 node_modules/is-number/package.json delete mode 100644 node_modules/jsonfile/CHANGELOG.md delete mode 100644 node_modules/jsonfile/LICENSE delete mode 100644 node_modules/jsonfile/README.md delete mode 100644 node_modules/jsonfile/index.js delete mode 100644 node_modules/jsonfile/package.json delete mode 100644 node_modules/jsonfile/utils.js delete mode 100644 node_modules/lilconfig/LICENSE delete mode 100644 node_modules/lilconfig/package.json delete mode 100644 node_modules/lilconfig/readme.md delete mode 100644 node_modules/lilconfig/src/index.d.ts delete mode 100644 node_modules/lilconfig/src/index.js delete mode 100644 node_modules/merge2/LICENSE delete mode 100644 node_modules/merge2/README.md delete mode 100644 node_modules/merge2/index.js delete mode 100644 node_modules/merge2/package.json delete mode 100755 node_modules/micromatch/LICENSE delete mode 100644 node_modules/micromatch/README.md delete mode 100644 node_modules/micromatch/index.js delete mode 100644 node_modules/micromatch/package.json delete mode 100644 node_modules/nanoid/.devcontainer.json delete mode 100644 node_modules/nanoid/LICENSE delete mode 100644 node_modules/nanoid/README.md delete mode 100644 node_modules/nanoid/async/index.browser.cjs delete mode 100644 node_modules/nanoid/async/index.browser.js delete mode 100644 node_modules/nanoid/async/index.cjs delete mode 100644 node_modules/nanoid/async/index.d.ts delete mode 100644 node_modules/nanoid/async/index.js delete mode 100644 node_modules/nanoid/async/index.native.js delete mode 100644 node_modules/nanoid/async/package.json delete mode 100755 node_modules/nanoid/bin/nanoid.cjs delete mode 100644 node_modules/nanoid/index.browser.cjs delete mode 100644 node_modules/nanoid/index.browser.js delete mode 100644 node_modules/nanoid/index.cjs delete mode 100644 node_modules/nanoid/index.d.cts delete mode 100644 node_modules/nanoid/index.d.ts delete mode 100644 node_modules/nanoid/index.js delete mode 100644 node_modules/nanoid/nanoid.js delete mode 100644 node_modules/nanoid/non-secure/index.cjs delete mode 100644 node_modules/nanoid/non-secure/index.d.ts delete mode 100644 node_modules/nanoid/non-secure/index.js delete mode 100644 node_modules/nanoid/non-secure/package.json delete mode 100644 node_modules/nanoid/package.json delete mode 100644 node_modules/nanoid/url-alphabet/index.cjs delete mode 100644 node_modules/nanoid/url-alphabet/index.js delete mode 100644 node_modules/nanoid/url-alphabet/package.json delete mode 100644 node_modules/normalize-path/LICENSE delete mode 100644 node_modules/normalize-path/README.md delete mode 100644 node_modules/normalize-path/index.js delete mode 100644 node_modules/normalize-path/package.json delete mode 100644 node_modules/path-type/index.d.ts delete mode 100644 node_modules/path-type/index.js delete mode 100644 node_modules/path-type/license delete mode 100644 node_modules/path-type/package.json delete mode 100644 node_modules/path-type/readme.md delete mode 100644 node_modules/picocolors/LICENSE delete mode 100644 node_modules/picocolors/README.md delete mode 100644 node_modules/picocolors/package.json delete mode 100644 node_modules/picocolors/picocolors.browser.js delete mode 100644 node_modules/picocolors/picocolors.d.ts delete mode 100644 node_modules/picocolors/picocolors.js delete mode 100644 node_modules/picocolors/types.d.ts delete mode 100644 node_modules/picomatch/CHANGELOG.md delete mode 100644 
node_modules/picomatch/LICENSE delete mode 100644 node_modules/picomatch/README.md delete mode 100644 node_modules/picomatch/index.js delete mode 100644 node_modules/picomatch/lib/constants.js delete mode 100644 node_modules/picomatch/lib/parse.js delete mode 100644 node_modules/picomatch/lib/picomatch.js delete mode 100644 node_modules/picomatch/lib/scan.js delete mode 100644 node_modules/picomatch/lib/utils.js delete mode 100644 node_modules/picomatch/package.json delete mode 100644 node_modules/pify/index.js delete mode 100644 node_modules/pify/license delete mode 100644 node_modules/pify/package.json delete mode 100644 node_modules/pify/readme.md delete mode 100644 node_modules/postcss-cli/LICENSE delete mode 100644 node_modules/postcss-cli/README.md delete mode 100755 node_modules/postcss-cli/index.js delete mode 100644 node_modules/postcss-cli/lib/DependencyGraph.js delete mode 100644 node_modules/postcss-cli/lib/args.js delete mode 100644 node_modules/postcss-cli/lib/getMapfile.js delete mode 100644 node_modules/postcss-cli/package.json delete mode 100644 node_modules/postcss-load-config/LICENSE delete mode 100644 node_modules/postcss-load-config/README.md delete mode 100644 node_modules/postcss-load-config/package.json delete mode 100644 node_modules/postcss-load-config/src/index.d.ts delete mode 100644 node_modules/postcss-load-config/src/index.js delete mode 100644 node_modules/postcss-load-config/src/options.js delete mode 100644 node_modules/postcss-load-config/src/plugins.js delete mode 100644 node_modules/postcss-load-config/src/req.js delete mode 100644 node_modules/postcss-reporter/LICENSE delete mode 100644 node_modules/postcss-reporter/README.md delete mode 100644 node_modules/postcss-reporter/index.js delete mode 100644 node_modules/postcss-reporter/lib/formatter.js delete mode 100644 node_modules/postcss-reporter/lib/reporter.js delete mode 100644 node_modules/postcss-reporter/lib/util.js delete mode 100644 node_modules/postcss-reporter/package.json delete mode 100644 node_modules/postcss/LICENSE delete mode 100644 node_modules/postcss/README.md delete mode 100644 node_modules/postcss/lib/at-rule.d.ts delete mode 100644 node_modules/postcss/lib/at-rule.js delete mode 100644 node_modules/postcss/lib/comment.d.ts delete mode 100644 node_modules/postcss/lib/comment.js delete mode 100644 node_modules/postcss/lib/container.d.ts delete mode 100644 node_modules/postcss/lib/container.js delete mode 100644 node_modules/postcss/lib/css-syntax-error.d.ts delete mode 100644 node_modules/postcss/lib/css-syntax-error.js delete mode 100644 node_modules/postcss/lib/declaration.d.ts delete mode 100644 node_modules/postcss/lib/declaration.js delete mode 100644 node_modules/postcss/lib/document.d.ts delete mode 100644 node_modules/postcss/lib/document.js delete mode 100644 node_modules/postcss/lib/fromJSON.d.ts delete mode 100644 node_modules/postcss/lib/fromJSON.js delete mode 100644 node_modules/postcss/lib/input.d.ts delete mode 100644 node_modules/postcss/lib/input.js delete mode 100644 node_modules/postcss/lib/lazy-result.d.ts delete mode 100644 node_modules/postcss/lib/lazy-result.js delete mode 100644 node_modules/postcss/lib/list.d.ts delete mode 100644 node_modules/postcss/lib/list.js delete mode 100644 node_modules/postcss/lib/map-generator.js delete mode 100644 node_modules/postcss/lib/no-work-result.d.ts delete mode 100644 node_modules/postcss/lib/no-work-result.js delete mode 100644 node_modules/postcss/lib/node.d.ts delete mode 100644 node_modules/postcss/lib/node.js delete 
mode 100644 node_modules/postcss/lib/parse.d.ts delete mode 100644 node_modules/postcss/lib/parse.js delete mode 100644 node_modules/postcss/lib/parser.js delete mode 100644 node_modules/postcss/lib/postcss.d.mts delete mode 100644 node_modules/postcss/lib/postcss.d.ts delete mode 100644 node_modules/postcss/lib/postcss.js delete mode 100644 node_modules/postcss/lib/postcss.mjs delete mode 100644 node_modules/postcss/lib/previous-map.d.ts delete mode 100644 node_modules/postcss/lib/previous-map.js delete mode 100644 node_modules/postcss/lib/processor.d.ts delete mode 100644 node_modules/postcss/lib/processor.js delete mode 100644 node_modules/postcss/lib/result.d.ts delete mode 100644 node_modules/postcss/lib/result.js delete mode 100644 node_modules/postcss/lib/root.d.ts delete mode 100644 node_modules/postcss/lib/root.js delete mode 100644 node_modules/postcss/lib/rule.d.ts delete mode 100644 node_modules/postcss/lib/rule.js delete mode 100644 node_modules/postcss/lib/stringifier.d.ts delete mode 100644 node_modules/postcss/lib/stringifier.js delete mode 100644 node_modules/postcss/lib/stringify.d.ts delete mode 100644 node_modules/postcss/lib/stringify.js delete mode 100644 node_modules/postcss/lib/symbols.js delete mode 100644 node_modules/postcss/lib/terminal-highlight.js delete mode 100644 node_modules/postcss/lib/tokenize.js delete mode 100644 node_modules/postcss/lib/warn-once.js delete mode 100644 node_modules/postcss/lib/warning.d.ts delete mode 100644 node_modules/postcss/lib/warning.js delete mode 100755 node_modules/postcss/package.json delete mode 100644 node_modules/pretty-hrtime/.jshintignore delete mode 100644 node_modules/pretty-hrtime/.npmignore delete mode 100644 node_modules/pretty-hrtime/LICENSE delete mode 100644 node_modules/pretty-hrtime/README.md delete mode 100644 node_modules/pretty-hrtime/index.js delete mode 100644 node_modules/pretty-hrtime/package.json delete mode 100755 node_modules/queue-microtask/LICENSE delete mode 100644 node_modules/queue-microtask/README.md delete mode 100644 node_modules/queue-microtask/index.d.ts delete mode 100644 node_modules/queue-microtask/index.js delete mode 100644 node_modules/queue-microtask/package.json delete mode 100644 node_modules/read-cache/LICENSE delete mode 100644 node_modules/read-cache/README.md delete mode 100644 node_modules/read-cache/index.js delete mode 100644 node_modules/read-cache/package.json delete mode 100644 node_modules/readdirp/LICENSE delete mode 100644 node_modules/readdirp/README.md delete mode 100644 node_modules/readdirp/index.d.ts delete mode 100644 node_modules/readdirp/index.js delete mode 100644 node_modules/readdirp/package.json delete mode 100644 node_modules/require-directory/.jshintrc delete mode 100644 node_modules/require-directory/.npmignore delete mode 100644 node_modules/require-directory/.travis.yml delete mode 100644 node_modules/require-directory/LICENSE delete mode 100644 node_modules/require-directory/README.markdown delete mode 100644 node_modules/require-directory/index.js delete mode 100644 node_modules/require-directory/package.json delete mode 100644 node_modules/reusify/.coveralls.yml delete mode 100644 node_modules/reusify/.travis.yml delete mode 100644 node_modules/reusify/LICENSE delete mode 100644 node_modules/reusify/README.md delete mode 100644 node_modules/reusify/benchmarks/createNoCodeFunction.js delete mode 100644 node_modules/reusify/benchmarks/fib.js delete mode 100644 node_modules/reusify/benchmarks/reuseNoCodeFunction.js delete mode 100644 
node_modules/reusify/package.json delete mode 100644 node_modules/reusify/reusify.js delete mode 100644 node_modules/reusify/test.js delete mode 100644 node_modules/run-parallel/LICENSE delete mode 100644 node_modules/run-parallel/README.md delete mode 100644 node_modules/run-parallel/index.js delete mode 100644 node_modules/run-parallel/package.json delete mode 100644 node_modules/slash/index.d.ts delete mode 100644 node_modules/slash/index.js delete mode 100644 node_modules/slash/license delete mode 100644 node_modules/slash/package.json delete mode 100644 node_modules/slash/readme.md delete mode 100644 node_modules/source-map-js/LICENSE delete mode 100644 node_modules/source-map-js/README.md delete mode 100644 node_modules/source-map-js/lib/array-set.js delete mode 100644 node_modules/source-map-js/lib/base64-vlq.js delete mode 100644 node_modules/source-map-js/lib/base64.js delete mode 100644 node_modules/source-map-js/lib/binary-search.js delete mode 100644 node_modules/source-map-js/lib/mapping-list.js delete mode 100644 node_modules/source-map-js/lib/quick-sort.js delete mode 100644 node_modules/source-map-js/lib/source-map-consumer.d.ts delete mode 100644 node_modules/source-map-js/lib/source-map-consumer.js delete mode 100644 node_modules/source-map-js/lib/source-map-generator.d.ts delete mode 100644 node_modules/source-map-js/lib/source-map-generator.js delete mode 100644 node_modules/source-map-js/lib/source-node.d.ts delete mode 100644 node_modules/source-map-js/lib/source-node.js delete mode 100644 node_modules/source-map-js/lib/util.js delete mode 100644 node_modules/source-map-js/package.json delete mode 100644 node_modules/source-map-js/source-map.d.ts delete mode 100644 node_modules/source-map-js/source-map.js delete mode 100644 node_modules/string-width/index.d.ts delete mode 100644 node_modules/string-width/index.js delete mode 100644 node_modules/string-width/license delete mode 100644 node_modules/string-width/package.json delete mode 100644 node_modules/string-width/readme.md delete mode 100644 node_modules/strip-ansi/index.d.ts delete mode 100644 node_modules/strip-ansi/index.js delete mode 100644 node_modules/strip-ansi/license delete mode 100644 node_modules/strip-ansi/package.json delete mode 100644 node_modules/strip-ansi/readme.md delete mode 100644 node_modules/thenby/LICENSE.TXT delete mode 100644 node_modules/thenby/README.md delete mode 100644 node_modules/thenby/package.json delete mode 100644 node_modules/thenby/thenBy.min.js delete mode 100644 node_modules/thenby/thenBy.module.d.ts delete mode 100644 node_modules/thenby/thenBy.module.js delete mode 100644 node_modules/to-regex-range/LICENSE delete mode 100644 node_modules/to-regex-range/README.md delete mode 100644 node_modules/to-regex-range/index.js delete mode 100644 node_modules/to-regex-range/package.json delete mode 100644 node_modules/unicorn-magic/default.js delete mode 100644 node_modules/unicorn-magic/index.d.ts delete mode 100644 node_modules/unicorn-magic/license delete mode 100644 node_modules/unicorn-magic/node.js delete mode 100644 node_modules/unicorn-magic/package.json delete mode 100644 node_modules/unicorn-magic/readme.md delete mode 100644 node_modules/universalify/LICENSE delete mode 100644 node_modules/universalify/README.md delete mode 100644 node_modules/universalify/index.js delete mode 100644 node_modules/universalify/package.json delete mode 100755 node_modules/wrap-ansi/index.js delete mode 100644 node_modules/wrap-ansi/license delete mode 100644 
node_modules/wrap-ansi/package.json delete mode 100644 node_modules/wrap-ansi/readme.md delete mode 100644 node_modules/y18n/CHANGELOG.md delete mode 100644 node_modules/y18n/LICENSE delete mode 100644 node_modules/y18n/README.md delete mode 100644 node_modules/y18n/build/index.cjs delete mode 100644 node_modules/y18n/build/lib/cjs.js delete mode 100644 node_modules/y18n/build/lib/index.js delete mode 100644 node_modules/y18n/build/lib/platform-shims/node.js delete mode 100644 node_modules/y18n/index.mjs delete mode 100644 node_modules/y18n/package.json delete mode 100644 node_modules/yaml/LICENSE delete mode 100644 node_modules/yaml/README.md delete mode 100755 node_modules/yaml/bin.mjs delete mode 100644 node_modules/yaml/browser/dist/compose/compose-collection.js delete mode 100644 node_modules/yaml/browser/dist/compose/compose-doc.js delete mode 100644 node_modules/yaml/browser/dist/compose/compose-node.js delete mode 100644 node_modules/yaml/browser/dist/compose/compose-scalar.js delete mode 100644 node_modules/yaml/browser/dist/compose/composer.js delete mode 100644 node_modules/yaml/browser/dist/compose/resolve-block-map.js delete mode 100644 node_modules/yaml/browser/dist/compose/resolve-block-scalar.js delete mode 100644 node_modules/yaml/browser/dist/compose/resolve-block-seq.js delete mode 100644 node_modules/yaml/browser/dist/compose/resolve-end.js delete mode 100644 node_modules/yaml/browser/dist/compose/resolve-flow-collection.js delete mode 100644 node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js delete mode 100644 node_modules/yaml/browser/dist/compose/resolve-props.js delete mode 100644 node_modules/yaml/browser/dist/compose/util-contains-newline.js delete mode 100644 node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js delete mode 100644 node_modules/yaml/browser/dist/compose/util-flow-indent-check.js delete mode 100644 node_modules/yaml/browser/dist/compose/util-map-includes.js delete mode 100644 node_modules/yaml/browser/dist/doc/Document.js delete mode 100644 node_modules/yaml/browser/dist/doc/anchors.js delete mode 100644 node_modules/yaml/browser/dist/doc/applyReviver.js delete mode 100644 node_modules/yaml/browser/dist/doc/createNode.js delete mode 100644 node_modules/yaml/browser/dist/doc/directives.js delete mode 100644 node_modules/yaml/browser/dist/errors.js delete mode 100644 node_modules/yaml/browser/dist/index.js delete mode 100644 node_modules/yaml/browser/dist/log.js delete mode 100644 node_modules/yaml/browser/dist/nodes/Alias.js delete mode 100644 node_modules/yaml/browser/dist/nodes/Collection.js delete mode 100644 node_modules/yaml/browser/dist/nodes/Node.js delete mode 100644 node_modules/yaml/browser/dist/nodes/Pair.js delete mode 100644 node_modules/yaml/browser/dist/nodes/Scalar.js delete mode 100644 node_modules/yaml/browser/dist/nodes/YAMLMap.js delete mode 100644 node_modules/yaml/browser/dist/nodes/YAMLSeq.js delete mode 100644 node_modules/yaml/browser/dist/nodes/addPairToJSMap.js delete mode 100644 node_modules/yaml/browser/dist/nodes/identity.js delete mode 100644 node_modules/yaml/browser/dist/nodes/toJS.js delete mode 100644 node_modules/yaml/browser/dist/parse/cst-scalar.js delete mode 100644 node_modules/yaml/browser/dist/parse/cst-stringify.js delete mode 100644 node_modules/yaml/browser/dist/parse/cst-visit.js delete mode 100644 node_modules/yaml/browser/dist/parse/cst.js delete mode 100644 node_modules/yaml/browser/dist/parse/lexer.js delete mode 100644 node_modules/yaml/browser/dist/parse/line-counter.js 
delete mode 100644 node_modules/yaml/browser/dist/parse/parser.js delete mode 100644 node_modules/yaml/browser/dist/public-api.js delete mode 100644 node_modules/yaml/browser/dist/schema/Schema.js delete mode 100644 node_modules/yaml/browser/dist/schema/common/map.js delete mode 100644 node_modules/yaml/browser/dist/schema/common/null.js delete mode 100644 node_modules/yaml/browser/dist/schema/common/seq.js delete mode 100644 node_modules/yaml/browser/dist/schema/common/string.js delete mode 100644 node_modules/yaml/browser/dist/schema/core/bool.js delete mode 100644 node_modules/yaml/browser/dist/schema/core/float.js delete mode 100644 node_modules/yaml/browser/dist/schema/core/int.js delete mode 100644 node_modules/yaml/browser/dist/schema/core/schema.js delete mode 100644 node_modules/yaml/browser/dist/schema/json/schema.js delete mode 100644 node_modules/yaml/browser/dist/schema/tags.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/float.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/int.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/merge.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/set.js delete mode 100644 node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js delete mode 100644 node_modules/yaml/browser/dist/stringify/foldFlowLines.js delete mode 100644 node_modules/yaml/browser/dist/stringify/stringify.js delete mode 100644 node_modules/yaml/browser/dist/stringify/stringifyCollection.js delete mode 100644 node_modules/yaml/browser/dist/stringify/stringifyComment.js delete mode 100644 node_modules/yaml/browser/dist/stringify/stringifyDocument.js delete mode 100644 node_modules/yaml/browser/dist/stringify/stringifyNumber.js delete mode 100644 node_modules/yaml/browser/dist/stringify/stringifyPair.js delete mode 100644 node_modules/yaml/browser/dist/stringify/stringifyString.js delete mode 100644 node_modules/yaml/browser/dist/util.js delete mode 100644 node_modules/yaml/browser/dist/visit.js delete mode 100644 node_modules/yaml/browser/index.js delete mode 100644 node_modules/yaml/browser/package.json delete mode 100644 node_modules/yaml/dist/cli.d.ts delete mode 100644 node_modules/yaml/dist/cli.mjs delete mode 100644 node_modules/yaml/dist/compose/compose-collection.d.ts delete mode 100644 node_modules/yaml/dist/compose/compose-collection.js delete mode 100644 node_modules/yaml/dist/compose/compose-doc.d.ts delete mode 100644 node_modules/yaml/dist/compose/compose-doc.js delete mode 100644 node_modules/yaml/dist/compose/compose-node.d.ts delete mode 100644 node_modules/yaml/dist/compose/compose-node.js delete mode 100644 node_modules/yaml/dist/compose/compose-scalar.d.ts delete mode 100644 node_modules/yaml/dist/compose/compose-scalar.js delete mode 100644 node_modules/yaml/dist/compose/composer.d.ts delete mode 100644 node_modules/yaml/dist/compose/composer.js delete mode 100644 node_modules/yaml/dist/compose/resolve-block-map.d.ts delete mode 100644 node_modules/yaml/dist/compose/resolve-block-map.js delete mode 100644 node_modules/yaml/dist/compose/resolve-block-scalar.d.ts delete mode 100644 
node_modules/yaml/dist/compose/resolve-block-scalar.js delete mode 100644 node_modules/yaml/dist/compose/resolve-block-seq.d.ts delete mode 100644 node_modules/yaml/dist/compose/resolve-block-seq.js delete mode 100644 node_modules/yaml/dist/compose/resolve-end.d.ts delete mode 100644 node_modules/yaml/dist/compose/resolve-end.js delete mode 100644 node_modules/yaml/dist/compose/resolve-flow-collection.d.ts delete mode 100644 node_modules/yaml/dist/compose/resolve-flow-collection.js delete mode 100644 node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts delete mode 100644 node_modules/yaml/dist/compose/resolve-flow-scalar.js delete mode 100644 node_modules/yaml/dist/compose/resolve-props.d.ts delete mode 100644 node_modules/yaml/dist/compose/resolve-props.js delete mode 100644 node_modules/yaml/dist/compose/util-contains-newline.d.ts delete mode 100644 node_modules/yaml/dist/compose/util-contains-newline.js delete mode 100644 node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts delete mode 100644 node_modules/yaml/dist/compose/util-empty-scalar-position.js delete mode 100644 node_modules/yaml/dist/compose/util-flow-indent-check.d.ts delete mode 100644 node_modules/yaml/dist/compose/util-flow-indent-check.js delete mode 100644 node_modules/yaml/dist/compose/util-map-includes.d.ts delete mode 100644 node_modules/yaml/dist/compose/util-map-includes.js delete mode 100644 node_modules/yaml/dist/doc/Document.d.ts delete mode 100644 node_modules/yaml/dist/doc/Document.js delete mode 100644 node_modules/yaml/dist/doc/anchors.d.ts delete mode 100644 node_modules/yaml/dist/doc/anchors.js delete mode 100644 node_modules/yaml/dist/doc/applyReviver.d.ts delete mode 100644 node_modules/yaml/dist/doc/applyReviver.js delete mode 100644 node_modules/yaml/dist/doc/createNode.d.ts delete mode 100644 node_modules/yaml/dist/doc/createNode.js delete mode 100644 node_modules/yaml/dist/doc/directives.d.ts delete mode 100644 node_modules/yaml/dist/doc/directives.js delete mode 100644 node_modules/yaml/dist/errors.d.ts delete mode 100644 node_modules/yaml/dist/errors.js delete mode 100644 node_modules/yaml/dist/index.d.ts delete mode 100644 node_modules/yaml/dist/index.js delete mode 100644 node_modules/yaml/dist/log.d.ts delete mode 100644 node_modules/yaml/dist/log.js delete mode 100644 node_modules/yaml/dist/nodes/Alias.d.ts delete mode 100644 node_modules/yaml/dist/nodes/Alias.js delete mode 100644 node_modules/yaml/dist/nodes/Collection.d.ts delete mode 100644 node_modules/yaml/dist/nodes/Collection.js delete mode 100644 node_modules/yaml/dist/nodes/Node.d.ts delete mode 100644 node_modules/yaml/dist/nodes/Node.js delete mode 100644 node_modules/yaml/dist/nodes/Pair.d.ts delete mode 100644 node_modules/yaml/dist/nodes/Pair.js delete mode 100644 node_modules/yaml/dist/nodes/Scalar.d.ts delete mode 100644 node_modules/yaml/dist/nodes/Scalar.js delete mode 100644 node_modules/yaml/dist/nodes/YAMLMap.d.ts delete mode 100644 node_modules/yaml/dist/nodes/YAMLMap.js delete mode 100644 node_modules/yaml/dist/nodes/YAMLSeq.d.ts delete mode 100644 node_modules/yaml/dist/nodes/YAMLSeq.js delete mode 100644 node_modules/yaml/dist/nodes/addPairToJSMap.d.ts delete mode 100644 node_modules/yaml/dist/nodes/addPairToJSMap.js delete mode 100644 node_modules/yaml/dist/nodes/identity.d.ts delete mode 100644 node_modules/yaml/dist/nodes/identity.js delete mode 100644 node_modules/yaml/dist/nodes/toJS.d.ts delete mode 100644 node_modules/yaml/dist/nodes/toJS.js delete mode 100644 node_modules/yaml/dist/options.d.ts 
delete mode 100644 node_modules/yaml/dist/parse/cst-scalar.d.ts delete mode 100644 node_modules/yaml/dist/parse/cst-scalar.js delete mode 100644 node_modules/yaml/dist/parse/cst-stringify.d.ts delete mode 100644 node_modules/yaml/dist/parse/cst-stringify.js delete mode 100644 node_modules/yaml/dist/parse/cst-visit.d.ts delete mode 100644 node_modules/yaml/dist/parse/cst-visit.js delete mode 100644 node_modules/yaml/dist/parse/cst.d.ts delete mode 100644 node_modules/yaml/dist/parse/cst.js delete mode 100644 node_modules/yaml/dist/parse/lexer.d.ts delete mode 100644 node_modules/yaml/dist/parse/lexer.js delete mode 100644 node_modules/yaml/dist/parse/line-counter.d.ts delete mode 100644 node_modules/yaml/dist/parse/line-counter.js delete mode 100644 node_modules/yaml/dist/parse/parser.d.ts delete mode 100644 node_modules/yaml/dist/parse/parser.js delete mode 100644 node_modules/yaml/dist/public-api.d.ts delete mode 100644 node_modules/yaml/dist/public-api.js delete mode 100644 node_modules/yaml/dist/schema/Schema.d.ts delete mode 100644 node_modules/yaml/dist/schema/Schema.js delete mode 100644 node_modules/yaml/dist/schema/common/map.d.ts delete mode 100644 node_modules/yaml/dist/schema/common/map.js delete mode 100644 node_modules/yaml/dist/schema/common/null.d.ts delete mode 100644 node_modules/yaml/dist/schema/common/null.js delete mode 100644 node_modules/yaml/dist/schema/common/seq.d.ts delete mode 100644 node_modules/yaml/dist/schema/common/seq.js delete mode 100644 node_modules/yaml/dist/schema/common/string.d.ts delete mode 100644 node_modules/yaml/dist/schema/common/string.js delete mode 100644 node_modules/yaml/dist/schema/core/bool.d.ts delete mode 100644 node_modules/yaml/dist/schema/core/bool.js delete mode 100644 node_modules/yaml/dist/schema/core/float.d.ts delete mode 100644 node_modules/yaml/dist/schema/core/float.js delete mode 100644 node_modules/yaml/dist/schema/core/int.d.ts delete mode 100644 node_modules/yaml/dist/schema/core/int.js delete mode 100644 node_modules/yaml/dist/schema/core/schema.d.ts delete mode 100644 node_modules/yaml/dist/schema/core/schema.js delete mode 100644 node_modules/yaml/dist/schema/json-schema.d.ts delete mode 100644 node_modules/yaml/dist/schema/json/schema.d.ts delete mode 100644 node_modules/yaml/dist/schema/json/schema.js delete mode 100644 node_modules/yaml/dist/schema/tags.d.ts delete mode 100644 node_modules/yaml/dist/schema/tags.js delete mode 100644 node_modules/yaml/dist/schema/types.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/binary.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/bool.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/float.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/float.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/int.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/int.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/merge.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/merge.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/omap.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/pairs.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts delete mode 100644 
node_modules/yaml/dist/schema/yaml-1.1/schema.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/set.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/set.js delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts delete mode 100644 node_modules/yaml/dist/schema/yaml-1.1/timestamp.js delete mode 100644 node_modules/yaml/dist/stringify/foldFlowLines.d.ts delete mode 100644 node_modules/yaml/dist/stringify/foldFlowLines.js delete mode 100644 node_modules/yaml/dist/stringify/stringify.d.ts delete mode 100644 node_modules/yaml/dist/stringify/stringify.js delete mode 100644 node_modules/yaml/dist/stringify/stringifyCollection.d.ts delete mode 100644 node_modules/yaml/dist/stringify/stringifyCollection.js delete mode 100644 node_modules/yaml/dist/stringify/stringifyComment.d.ts delete mode 100644 node_modules/yaml/dist/stringify/stringifyComment.js delete mode 100644 node_modules/yaml/dist/stringify/stringifyDocument.d.ts delete mode 100644 node_modules/yaml/dist/stringify/stringifyDocument.js delete mode 100644 node_modules/yaml/dist/stringify/stringifyNumber.d.ts delete mode 100644 node_modules/yaml/dist/stringify/stringifyNumber.js delete mode 100644 node_modules/yaml/dist/stringify/stringifyPair.d.ts delete mode 100644 node_modules/yaml/dist/stringify/stringifyPair.js delete mode 100644 node_modules/yaml/dist/stringify/stringifyString.d.ts delete mode 100644 node_modules/yaml/dist/stringify/stringifyString.js delete mode 100644 node_modules/yaml/dist/test-events.d.ts delete mode 100644 node_modules/yaml/dist/test-events.js delete mode 100644 node_modules/yaml/dist/util.d.ts delete mode 100644 node_modules/yaml/dist/util.js delete mode 100644 node_modules/yaml/dist/visit.d.ts delete mode 100644 node_modules/yaml/dist/visit.js delete mode 100644 node_modules/yaml/package.json delete mode 100644 node_modules/yaml/util.js delete mode 100644 node_modules/yargs-parser/CHANGELOG.md delete mode 100644 node_modules/yargs-parser/LICENSE.txt delete mode 100644 node_modules/yargs-parser/README.md delete mode 100644 node_modules/yargs-parser/browser.js delete mode 100644 node_modules/yargs-parser/build/index.cjs delete mode 100644 node_modules/yargs-parser/build/lib/index.js delete mode 100644 node_modules/yargs-parser/build/lib/string-utils.js delete mode 100644 node_modules/yargs-parser/build/lib/tokenize-arg-string.js delete mode 100644 node_modules/yargs-parser/build/lib/yargs-parser-types.js delete mode 100644 node_modules/yargs-parser/build/lib/yargs-parser.js delete mode 100644 node_modules/yargs-parser/package.json delete mode 100644 node_modules/yargs/LICENSE delete mode 100644 node_modules/yargs/README.md delete mode 100644 node_modules/yargs/browser.d.ts delete mode 100644 node_modules/yargs/browser.mjs delete mode 100644 node_modules/yargs/build/index.cjs delete mode 100644 node_modules/yargs/build/lib/argsert.js delete mode 100644 node_modules/yargs/build/lib/command.js delete mode 100644 node_modules/yargs/build/lib/completion-templates.js delete mode 100644 node_modules/yargs/build/lib/completion.js delete mode 100644 node_modules/yargs/build/lib/middleware.js delete mode 100644 node_modules/yargs/build/lib/parse-command.js delete mode 100644 node_modules/yargs/build/lib/typings/common-types.js delete mode 100644 node_modules/yargs/build/lib/typings/yargs-parser-types.js delete mode 100644 node_modules/yargs/build/lib/usage.js delete mode 100644 node_modules/yargs/build/lib/utils/apply-extends.js delete mode 100644 
node_modules/yargs/build/lib/utils/is-promise.js delete mode 100644 node_modules/yargs/build/lib/utils/levenshtein.js delete mode 100644 node_modules/yargs/build/lib/utils/maybe-async-result.js delete mode 100644 node_modules/yargs/build/lib/utils/obj-filter.js delete mode 100644 node_modules/yargs/build/lib/utils/process-argv.js delete mode 100644 node_modules/yargs/build/lib/utils/set-blocking.js delete mode 100644 node_modules/yargs/build/lib/utils/which-module.js delete mode 100644 node_modules/yargs/build/lib/validation.js delete mode 100644 node_modules/yargs/build/lib/yargs-factory.js delete mode 100644 node_modules/yargs/build/lib/yerror.js delete mode 100644 node_modules/yargs/helpers/helpers.mjs delete mode 100644 node_modules/yargs/helpers/index.js delete mode 100644 node_modules/yargs/helpers/package.json delete mode 100644 node_modules/yargs/index.cjs delete mode 100644 node_modules/yargs/index.mjs delete mode 100644 node_modules/yargs/lib/platform-shims/browser.mjs delete mode 100644 node_modules/yargs/lib/platform-shims/esm.mjs delete mode 100644 node_modules/yargs/locales/be.json delete mode 100644 node_modules/yargs/locales/cs.json delete mode 100644 node_modules/yargs/locales/de.json delete mode 100644 node_modules/yargs/locales/en.json delete mode 100644 node_modules/yargs/locales/es.json delete mode 100644 node_modules/yargs/locales/fi.json delete mode 100644 node_modules/yargs/locales/fr.json delete mode 100644 node_modules/yargs/locales/hi.json delete mode 100644 node_modules/yargs/locales/hu.json delete mode 100644 node_modules/yargs/locales/id.json delete mode 100644 node_modules/yargs/locales/it.json delete mode 100644 node_modules/yargs/locales/ja.json delete mode 100644 node_modules/yargs/locales/ko.json delete mode 100644 node_modules/yargs/locales/nb.json delete mode 100644 node_modules/yargs/locales/nl.json delete mode 100644 node_modules/yargs/locales/nn.json delete mode 100644 node_modules/yargs/locales/pirate.json delete mode 100644 node_modules/yargs/locales/pl.json delete mode 100644 node_modules/yargs/locales/pt.json delete mode 100644 node_modules/yargs/locales/pt_BR.json delete mode 100644 node_modules/yargs/locales/ru.json delete mode 100644 node_modules/yargs/locales/th.json delete mode 100644 node_modules/yargs/locales/tr.json delete mode 100644 node_modules/yargs/locales/uk_UA.json delete mode 100644 node_modules/yargs/locales/uz.json delete mode 100644 node_modules/yargs/locales/zh_CN.json delete mode 100644 node_modules/yargs/locales/zh_TW.json delete mode 100644 node_modules/yargs/package.json delete mode 100644 node_modules/yargs/yargs delete mode 100644 node_modules/yargs/yargs.mjs diff --git a/node_modules/.bin/nanoid b/node_modules/.bin/nanoid deleted file mode 120000 index e2be547..0000000 --- a/node_modules/.bin/nanoid +++ /dev/null @@ -1 +0,0 @@ -../nanoid/bin/nanoid.cjs \ No newline at end of file diff --git a/node_modules/.bin/postcss b/node_modules/.bin/postcss deleted file mode 120000 index 236af7c..0000000 --- a/node_modules/.bin/postcss +++ /dev/null @@ -1 +0,0 @@ -../postcss-cli/index.js \ No newline at end of file diff --git a/node_modules/.bin/yaml b/node_modules/.bin/yaml deleted file mode 120000 index 0368324..0000000 --- a/node_modules/.bin/yaml +++ /dev/null @@ -1 +0,0 @@ -../yaml/bin.mjs \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json deleted file mode 100644 index 7ff1e45..0000000 --- a/node_modules/.package-lock.json +++ /dev/null @@ -1,910 +0,0 @@ -{ - "name": 
"kgpz_web", - "lockfileVersion": 3, - "requires": true, - "packages": { - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@sindresorhus/merge-streams": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", - "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": 
"^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, - "license": "MIT" - }, - "node_modules/dependency-graph": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/dependency-graph/-/dependency-graph-0.11.0.tgz", - "integrity": "sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6.0" - } - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true, - "license": "MIT" - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/fast-glob": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fastq": { - "version": "1.17.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", - "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - 
"version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs-extra": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", - "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", - "dev": true, - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=14.14" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-stdin": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-9.0.0.tgz", - "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/globby": { - "version": "14.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.2.tgz", - "integrity": "sha512-s3Fq41ZVh7vbbe2PN3nrW7yC7U7MFVc5c98/iTl9c2GawNMKx/J648KQRW6WKkuU8GIbbh2IXfIRQjOZnXcTnw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@sindresorhus/merge-streams": "^2.1.0", - "fast-glob": "^3.3.2", - "ignore": "^5.2.4", - "path-type": "^5.0.0", - "slash": "^5.1.0", - "unicorn-magic": "^0.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": 
"https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/jsonfile": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", - "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/lilconfig": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", - "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/antonk52" - } - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "peer": true, - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - 
"license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-type": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", - "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/postcss": { - "version": "8.4.49", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz", - "integrity": "sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "peer": true, - "dependencies": { - "nanoid": "^3.3.7", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-cli": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/postcss-cli/-/postcss-cli-11.0.0.tgz", - "integrity": "sha512-xMITAI7M0u1yolVcXJ9XTZiO9aO49mcoKQy6pCDFdMh9kGqhzLVpWxeD/32M/QBmkhcGypZFFOLNLmIW4Pg4RA==", - "dev": true, - "license": "MIT", - "dependencies": { - "chokidar": "^3.3.0", - "dependency-graph": "^0.11.0", - "fs-extra": "^11.0.0", - "get-stdin": "^9.0.0", - "globby": "^14.0.0", - "picocolors": "^1.0.0", - "postcss-load-config": "^5.0.0", - "postcss-reporter": "^7.0.0", - "pretty-hrtime": "^1.0.3", - "read-cache": "^1.0.0", - "slash": "^5.0.0", - "yargs": "^17.0.0" - }, - "bin": { - "postcss": "index.js" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.0.0" - } - }, - "node_modules/postcss-load-config": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-5.1.0.tgz", - "integrity": "sha512-G5AJ+IX0aD0dygOE0yFZQ/huFFMSNneyfp0e3/bT05a8OfPC5FUoZRPfGijUdGOJNMewJiwzcHJXFafFzeKFVA==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "lilconfig": "^3.1.1", - "yaml": "^2.4.2" - }, - "engines": { - "node": ">= 18" - }, - "peerDependencies": { - "jiti": ">=1.21.0", - "postcss": 
">=8.0.9", - "tsx": "^4.8.1" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - }, - "postcss": { - "optional": true - }, - "tsx": { - "optional": true - } - } - }, - "node_modules/postcss-reporter": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/postcss-reporter/-/postcss-reporter-7.1.0.tgz", - "integrity": "sha512-/eoEylGWyy6/DOiMP5lmFRdmDKThqgn7D6hP2dXKJI/0rJSO1ADFNngZfDzxL0YAxFvws+Rtpuji1YIHj4mySA==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "picocolors": "^1.0.0", - "thenby": "^1.3.4" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/pretty-hrtime": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz", - "integrity": "sha512-66hKPCr+72mlfiSjlEB1+45IjXSqvVAIy6mocupoww4tBFE9R9IhwwUGoI4G++Tc9Aq+2rxOt0RFU6gPcrte0A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/read-cache": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", - "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", - "dev": true, - "license": "MIT", - "dependencies": { - "pify": "^2.3.0" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": 
"https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/slash": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", - "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "dev": true, - "license": "BSD-3-Clause", - "peer": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/thenby": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/thenby/-/thenby-1.3.4.tgz", - "integrity": "sha512-89Gi5raiWA3QZ4b2ePcEwswC3me9JIg+ToSgtE0JWeCynLnLxNr/f9G+xfo9K+Oj4AFdom8YNJjibIARTJmapQ==", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/unicorn-magic": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", - "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" 
- } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yaml": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.6.1.tgz", - "integrity": "sha512-7r0XPzioN/Q9kXBro/XPnA6kznR73DHq+GXh5ON7ZozRO6aMjbmiBuKste2wslTFkC5d1dw0GooOCepZXJ2SAg==", - "dev": true, - "license": "ISC", - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dev": true, - "license": "MIT", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - } - } -} diff --git a/node_modules/@nodelib/fs.scandir/LICENSE b/node_modules/@nodelib/fs.scandir/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/@nodelib/fs.scandir/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@nodelib/fs.scandir/README.md b/node_modules/@nodelib/fs.scandir/README.md deleted file mode 100644 index e0b218b..0000000 --- a/node_modules/@nodelib/fs.scandir/README.md +++ /dev/null @@ -1,171 +0,0 @@ -# @nodelib/fs.scandir - -> List files and directories inside the specified directory. - -## :bulb: Highlights - -The package is aimed at obtaining information about entries in the directory. - -* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). -* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode). -* :link: Can safely work with broken symbolic links. 
- -## Install - -```console -npm install @nodelib/fs.scandir -``` - -## Usage - -```ts -import * as fsScandir from '@nodelib/fs.scandir'; - -fsScandir.scandir('path', (error, stats) => { /* … */ }); -``` - -## API - -### .scandir(path, [optionsOrSettings], callback) - -Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style. - -```ts -fsScandir.scandir('path', (error, entries) => { /* … */ }); -fsScandir.scandir('path', {}, (error, entries) => { /* … */ }); -fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ }); -``` - -### .scandirSync(path, [optionsOrSettings]) - -Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path. - -```ts -const entries = fsScandir.scandirSync('path'); -const entries = fsScandir.scandirSync('path', {}); -const entries = fsScandir.scandirSync(('path', new fsScandir.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsScandir.Settings({ followSymbolicLinks: false }); - -const entries = fsScandir.scandirSync('path', settings); -``` - -## Entry - -* `name` — The name of the entry (`unknown.txt`). -* `path` — The path of the entry relative to call directory (`root/unknown.txt`). -* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class. -* `stats` (optional) — An instance of `fs.Stats` class. - -For example, the `scandir` call for `tools` directory with one directory inside: - -```ts -{ - dirent: Dirent { name: 'typedoc', /* … */ }, - name: 'typedoc', - path: 'tools/typedoc' -} -``` - -## Options - -### stats - -* Type: `boolean` -* Default: `false` - -Adds an instance of `fs.Stats` class to the [`Entry`](#entry). - -> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO?? - -### followSymbolicLinks - -* Type: `boolean` -* Default: `false` - -Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. - -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`. - -### `pathSegmentSeparator` - -* Type: `string` -* Default: `path.sep` - -By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. - -### `fs` - -* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. 
- -```ts -interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; - readdir?: typeof fs.readdir; - readdirSync?: typeof fs.readdirSync; -} - -const settings = new fsScandir.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## `old` and `modern` mode - -This package has two modes that are used depending on the environment and parameters of use. - -### old - -* Node.js below `10.10` or when the `stats` option is enabled - -When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links). - -### modern - -* Node.js 10.10+ and the `stats` option is disabled - -In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present. - -This mode makes fewer calls to the file system. It's faster. - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts deleted file mode 100644 index 827f1db..0000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type * as fsStat from '@nodelib/fs.stat'; -import type { Dirent, ErrnoException } from '../types'; -export interface ReaddirAsynchronousMethod { - (filepath: string, options: { - withFileTypes: true; - }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; - (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; -} -export interface ReaddirSynchronousMethod { - (filepath: string, options: { - withFileTypes: true; - }): Dirent[]; - (filepath: string): string[]; -} -export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { - readdir: ReaddirAsynchronousMethod; - readdirSync: ReaddirSynchronousMethod; -}; -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js deleted file mode 100644 index f0fe022..0000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/@nodelib/fs.scandir/out/constants.d.ts deleted file mode 100644 index 33f1749..0000000 --- a/node_modules/@nodelib/fs.scandir/out/constants.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/** - * IS `true` for Node.js 10.10 and greater. 
- */ -export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/@nodelib/fs.scandir/out/constants.js deleted file mode 100644 index 7e3d441..0000000 --- a/node_modules/@nodelib/fs.scandir/out/constants.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; -const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); -if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { - throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); -} -const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); -const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); -const SUPPORTED_MAJOR_VERSION = 10; -const SUPPORTED_MINOR_VERSION = 10; -const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; -const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; -/** - * IS `true` for Node.js 10.10 and greater. - */ -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/@nodelib/fs.scandir/out/index.d.ts deleted file mode 100644 index b9da83e..0000000 --- a/node_modules/@nodelib/fs.scandir/out/index.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import type { Dirent, Entry } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function scandir(path: string, callback: AsyncCallback): void; -declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace scandir { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; -export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/@nodelib/fs.scandir/out/index.js deleted file mode 100644 index 99c70d3..0000000 --- a/node_modules/@nodelib/fs.scandir/out/index.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.scandirSync = exports.scandir = void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function scandir(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - async.read(path, getSettings(), optionsOrSettingsOrCallback); - return; - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} -exports.scandir = scandir; -function scandirSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.scandirSync = scandirSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof 
settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts deleted file mode 100644 index 5829676..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -/// -import type Settings from '../settings'; -import type { Entry } from '../types'; -export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; -export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/@nodelib/fs.scandir/out/providers/async.js deleted file mode 100644 index e8e2f0a..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.js +++ /dev/null @@ -1,104 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const rpl = require("run-parallel"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings, callback) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - readdirWithFileTypes(directory, settings, callback); - return; - } - readdir(directory, settings, callback); -} -exports.read = read; -function readdirWithFileTypes(directory, settings, callback) { - settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { - if (readdirError !== null) { - callFailureCallback(callback, readdirError); - return; - } - const entries = dirents.map((dirent) => ({ - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - })); - if (!settings.followSymbolicLinks) { - callSuccessCallback(callback, entries); - return; - } - const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); - rpl(tasks, (rplError, rplEntries) => { - if (rplError !== null) { - callFailureCallback(callback, rplError); - return; - } - callSuccessCallback(callback, rplEntries); - }); - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function makeRplTaskEntry(entry, settings) { - return (done) => { - if (!entry.dirent.isSymbolicLink()) { - done(null, entry); - return; - } - settings.fs.stat(entry.path, (statError, stats) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - done(statError); - return; - } - done(null, entry); - return; - } - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - done(null, entry); - }); - }; -} -function readdir(directory, settings, callback) { - settings.fs.readdir(directory, (readdirError, names) => { - if (readdirError !== null) { - callFailureCallback(callback, readdirError); - return; - } - const tasks = names.map((name) => { - const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - return (done) => { - fsStat.stat(path, settings.fsStatSettings, (error, stats) => { - if (error !== null) { - done(error); - return; - } - const entry = { - name, - path, - dirent: 
utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - done(null, entry); - }); - }; - }); - rpl(tasks, (rplError, entries) => { - if (rplError !== null) { - callFailureCallback(callback, rplError); - return; - } - callSuccessCallback(callback, entries); - }); - }); -} -exports.readdir = readdir; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts deleted file mode 100644 index 2b4d08b..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/@nodelib/fs.scandir/out/providers/common.js deleted file mode 100644 index 8724cb5..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.joinPathSegments = void 0; -function joinPathSegments(a, b, separator) { - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). - */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts deleted file mode 100644 index e05c8f0..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type Settings from '../settings'; -import type { Entry } from '../types'; -export declare function read(directory: string, settings: Settings): Entry[]; -export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; -export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/@nodelib/fs.scandir/out/providers/sync.js deleted file mode 100644 index 146db34..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings); - } - return readdir(directory, settings); -} -exports.read = read; -function readdirWithFileTypes(directory, settings) { - const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); - return dirents.map((dirent) => { - const entry = { - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - }; - if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { - try { - const stats = settings.fs.statSync(entry.path); - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - } - catch (error) { - if (settings.throwErrorOnBrokenSymbolicLink) { - throw error; - } 
- } - } - return entry; - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function readdir(directory, settings) { - const names = settings.fs.readdirSync(directory); - return names.map((name) => { - const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - const stats = fsStat.statSync(entryPath, settings.fsStatSettings); - const entry = { - name, - path: entryPath, - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - return entry; - }); -} -exports.readdir = readdir; diff --git a/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/@nodelib/fs.scandir/out/settings.d.ts deleted file mode 100644 index a0db115..0000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as fs from './adapters/fs'; -export interface Options { - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly followSymbolicLinks: boolean; - readonly fs: fs.FileSystemAdapter; - readonly pathSegmentSeparator: string; - readonly stats: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - readonly fsStatSettings: fsStat.Settings; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/@nodelib/fs.scandir/out/settings.js deleted file mode 100644 index 15a3e8c..0000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsStat = require("@nodelib/fs.stat"); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.stats = this._getValue(this._options.stats, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - this.fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this.followSymbolicLinks, - fs: this.fs, - throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? 
option : value; - } -} -exports.default = Settings; diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts deleted file mode 100644 index f326c5e..0000000 --- a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/// -import type * as fs from 'fs'; -export interface Entry { - dirent: Dirent; - name: string; - path: string; - stats?: Stats; -} -export declare type Stats = fs.Stats; -export declare type ErrnoException = NodeJS.ErrnoException; -export interface Dirent { - isBlockDevice: () => boolean; - isCharacterDevice: () => boolean; - isDirectory: () => boolean; - isFIFO: () => boolean; - isFile: () => boolean; - isSocket: () => boolean; - isSymbolicLink: () => boolean; - name: string; -} diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/@nodelib/fs.scandir/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/@nodelib/fs.scandir/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts deleted file mode 100644 index bb863f1..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Dirent, Stats } from '../types'; -export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/@nodelib/fs.scandir/out/utils/fs.js deleted file mode 100644 index ace7c74..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts deleted file mode 100644 index 1b41954..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import * as fs from './fs'; -export { fs }; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.js b/node_modules/@nodelib/fs.scandir/out/utils/index.js deleted file mode 100644 index f5de129..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/index.js +++ /dev/null @@ -1,5 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fs = void 0; -const fs = require("./fs"); -exports.fs = fs; diff --git a/node_modules/@nodelib/fs.scandir/package.json b/node_modules/@nodelib/fs.scandir/package.json deleted file mode 100644 index d3a8924..0000000 --- a/node_modules/@nodelib/fs.scandir/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@nodelib/fs.scandir", - "version": "2.1.5", - "description": "List files and directories inside the specified 
directory", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "scandir", - "readdir", - "dirent" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . --watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4", - "@types/run-parallel": "^1.1.0" - }, - "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" -} diff --git a/node_modules/@nodelib/fs.stat/LICENSE b/node_modules/@nodelib/fs.stat/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/@nodelib/fs.stat/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@nodelib/fs.stat/README.md b/node_modules/@nodelib/fs.stat/README.md deleted file mode 100644 index 686f047..0000000 --- a/node_modules/@nodelib/fs.stat/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# @nodelib/fs.stat - -> Get the status of a file with some features. - -## :bulb: Highlights - -Wrapper around standard method `fs.lstat` and `fs.stat` with some features. - -* :beginner: Normally follows symbolic link. -* :gear: Can safely work with broken symbolic link. - -## Install - -```console -npm install @nodelib/fs.stat -``` - -## Usage - -```ts -import * as fsStat from '@nodelib/fs.stat'; - -fsStat.stat('path', (error, stats) => { /* … */ }); -``` - -## API - -### .stat(path, [optionsOrSettings], callback) - -Returns an instance of `fs.Stats` class for provided path with standard callback-style. - -```ts -fsStat.stat('path', (error, stats) => { /* … */ }); -fsStat.stat('path', {}, (error, stats) => { /* … */ }); -fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ }); -``` - -### .statSync(path, [optionsOrSettings]) - -Returns an instance of `fs.Stats` class for provided path. 
- -```ts -const stats = fsStat.stat('path'); -const stats = fsStat.stat('path', {}); -const stats = fsStat.stat('path', new fsStat.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settings) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsStat.Settings({ followSymbolicLink: false }); - -const stats = fsStat.stat('path', settings); -``` - -## Options - -### `followSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Follow symbolic link or not. Call `fs.stat` on symbolic link if `true`. - -### `markSymbolicLink` - -* Type: `boolean` -* Default: `false` - -Mark symbolic link by setting the return value of `isSymbolicLink` function to always `true` (even after `fs.stat`). - -> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link. - -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -### `fs` - -* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. - -```ts -interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; -} - -const settings = new fsStat.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts deleted file mode 100644 index 3af759c..0000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -/// -import * as fs from 'fs'; -import type { ErrnoException } from '../types'; -export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; -export declare type StatSynchronousMethod = (path: string) => fs.Stats; -export interface FileSystemAdapter { - lstat: StatAsynchronousMethod; - stat: StatAsynchronousMethod; - lstatSync: StatSynchronousMethod; - statSync: StatSynchronousMethod; -} -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/@nodelib/fs.stat/out/adapters/fs.js deleted file mode 100644 index 8dc08c8..0000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/@nodelib/fs.stat/out/index.d.ts deleted file mode 100644 index f95db99..0000000 --- a/node_modules/@nodelib/fs.stat/out/index.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import type { Stats } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function stat(path: string, callback: AsyncCallback): void; -declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace stat { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; -export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/@nodelib/fs.stat/out/index.js deleted file mode 100644 index b23f751..0000000 --- a/node_modules/@nodelib/fs.stat/out/index.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.statSync = exports.stat = exports.Settings = void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function stat(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - async.read(path, getSettings(), optionsOrSettingsOrCallback); - return; - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} 
-exports.stat = stat; -function statSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.statSync = statSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts deleted file mode 100644 index 85423ce..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type Settings from '../settings'; -import type { ErrnoException, Stats } from '../types'; -export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; -export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/@nodelib/fs.stat/out/providers/async.js deleted file mode 100644 index 983ff0e..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings, callback) { - settings.fs.lstat(path, (lstatError, lstat) => { - if (lstatError !== null) { - callFailureCallback(callback, lstatError); - return; - } - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - callSuccessCallback(callback, lstat); - return; - } - settings.fs.stat(path, (statError, stat) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - callFailureCallback(callback, statError); - return; - } - callSuccessCallback(callback, lstat); - return; - } - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - callSuccessCallback(callback, stat); - }); - }); -} -exports.read = read; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts deleted file mode 100644 index 428c3d7..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type Settings from '../settings'; -import type { Stats } from '../types'; -export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.js b/node_modules/@nodelib/fs.stat/out/providers/sync.js deleted file mode 100644 index 1521c36..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings) { - const lstat = settings.fs.lstatSync(path); - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return lstat; - } - try { - const stat = settings.fs.statSync(path); - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - return stat; - } - catch (error) { - if (!settings.throwErrorOnBrokenSymbolicLink) { - return lstat; - } - throw error; - } -} -exports.read = read; diff --git a/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/@nodelib/fs.stat/out/settings.d.ts deleted file mode 100644 index f4b3d44..0000000 --- a/node_modules/@nodelib/fs.stat/out/settings.d.ts +++ 
/dev/null @@ -1,16 +0,0 @@ -import * as fs from './adapters/fs'; -export interface Options { - followSymbolicLink?: boolean; - fs?: Partial; - markSymbolicLink?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly followSymbolicLink: boolean; - readonly fs: fs.FileSystemAdapter; - readonly markSymbolicLink: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/@nodelib/fs.stat/out/settings.js deleted file mode 100644 index 111ec09..0000000 --- a/node_modules/@nodelib/fs.stat/out/settings.js +++ /dev/null @@ -1,16 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? option : value; - } -} -exports.default = Settings; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/@nodelib/fs.stat/out/types/index.d.ts deleted file mode 100644 index 74c08ed..0000000 --- a/node_modules/@nodelib/fs.stat/out/types/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -import type * as fs from 'fs'; -export declare type Stats = fs.Stats; -export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/@nodelib/fs.stat/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/@nodelib/fs.stat/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.stat/package.json b/node_modules/@nodelib/fs.stat/package.json deleted file mode 100644 index f2540c2..0000000 --- a/node_modules/@nodelib/fs.stat/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@nodelib/fs.stat", - "version": "2.0.5", - "description": "Get the status of a file with some features", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "stat" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . 
--watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4" - }, - "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" -} diff --git a/node_modules/@nodelib/fs.walk/LICENSE b/node_modules/@nodelib/fs.walk/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/@nodelib/fs.walk/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@nodelib/fs.walk/README.md b/node_modules/@nodelib/fs.walk/README.md deleted file mode 100644 index 6ccc08d..0000000 --- a/node_modules/@nodelib/fs.walk/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# @nodelib/fs.walk - -> A library for efficiently walking a directory recursively. - -## :bulb: Highlights - -* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). -* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode). -* :gear: Built-in directories/files and error filtering system. -* :link: Can safely work with broken symbolic links. - -## Install - -```console -npm install @nodelib/fs.walk -``` - -## Usage - -```ts -import * as fsWalk from '@nodelib/fs.walk'; - -fsWalk.walk('path', (error, entries) => { /* … */ }); -``` - -## API - -### .walk(path, [optionsOrSettings], callback) - -Reads the directory recursively and asynchronously. Requires a callback function. - -> :book: If you want to use the Promise API, use `util.promisify`. - -```ts -fsWalk.walk('path', (error, entries) => { /* … */ }); -fsWalk.walk('path', {}, (error, entries) => { /* … */ }); -fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ }); -``` - -### .walkStream(path, [optionsOrSettings]) - -Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider. - -```ts -const stream = fsWalk.walkStream('path'); -const stream = fsWalk.walkStream('path', {}); -const stream = fsWalk.walkStream('path', new fsWalk.Settings()); -``` - -### .walkSync(path, [optionsOrSettings]) - -Reads the directory recursively and synchronously. 
Returns an array of entries. - -```ts -const entries = fsWalk.walkSync('path'); -const entries = fsWalk.walkSync('path', {}); -const entries = fsWalk.walkSync('path', new fsWalk.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settings) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsWalk.Settings({ followSymbolicLinks: true }); - -const entries = fsWalk.walkSync('path', settings); -``` - -## Entry - -* `name` — The name of the entry (`unknown.txt`). -* `path` — The path of the entry relative to call directory (`root/unknown.txt`). -* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. -* [`stats`] — An instance of `fs.Stats` class. - -## Options - -### basePath - -* Type: `string` -* Default: `undefined` - -By default, all paths are built relative to the root path. You can use this option to set custom root path. - -In the example below we read the files from the `root` directory, but in the results the root path will be `custom`. - -```ts -fsWalk.walkSync('root'); // → ['root/file.txt'] -fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt'] -``` - -### concurrency - -* Type: `number` -* Default: `Infinity` - -The maximum number of concurrent calls to `fs.readdir`. - -> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)). - -### deepFilter - -* Type: [`DeepFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that indicates whether the directory will be read deep or not. - -```ts -// Skip all directories that starts with `node_modules` -const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules'); -``` - -### entryFilter - -* Type: [`EntryFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that indicates whether the entry will be included to results or not. - -```ts -// Exclude all `.js` files from results -const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js'); -``` - -### errorFilter - -* Type: [`ErrorFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that allows you to skip errors that occur when reading directories. - -For example, you can skip `ENOENT` errors if required: - -```ts -// Skip all ENOENT errors -const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT'; -``` - -### stats - -* Type: `boolean` -* Default: `false` - -Adds an instance of `fs.Stats` class to the [`Entry`](#entry). - -> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type. - -### followSymbolicLinks - -* Type: `boolean` -* Default: `false` - -Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. 
- -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -### `pathSegmentSeparator` - -* Type: `string` -* Default: `path.sep` - -By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. - -### `fs` - -* Type: `FileSystemAdapter` -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. - -```ts -interface FileSystemAdapter { - lstat: typeof fs.lstat; - stat: typeof fs.stat; - lstatSync: typeof fs.lstatSync; - statSync: typeof fs.statSync; - readdir: typeof fs.readdir; - readdirSync: typeof fs.readdirSync; -} - -const settings = new fsWalk.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/@nodelib/fs.walk/out/index.d.ts deleted file mode 100644 index 8864c7b..0000000 --- a/node_modules/@nodelib/fs.walk/out/index.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import type { Readable } from 'stream'; -import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; -import { AsyncCallback } from './providers/async'; -import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; -import type { Entry } from './types'; -declare function walk(directory: string, callback: AsyncCallback): void; -declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace walk { - function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; -declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; -export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/@nodelib/fs.walk/out/index.js deleted file mode 100644 index 1520787..0000000 --- a/node_modules/@nodelib/fs.walk/out/index.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; -const async_1 = require("./providers/async"); -const stream_1 = require("./providers/stream"); -const sync_1 = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function walk(directory, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); - return; - } - new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); -} -exports.walk = walk; -function walkSync(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new sync_1.default(directory, settings); - return 
provider.read(); -} -exports.walkSync = walkSync; -function walkStream(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new stream_1.default(directory, settings); - return provider.read(); -} -exports.walkStream = walkStream; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts deleted file mode 100644 index 0f6717d..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import AsyncReader from '../readers/async'; -import type Settings from '../settings'; -import type { Entry, Errno } from '../types'; -export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; -export default class AsyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: AsyncReader; - private readonly _storage; - constructor(_root: string, _settings: Settings); - read(callback: AsyncCallback): void; -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/@nodelib/fs.walk/out/providers/async.js deleted file mode 100644 index 51d3be5..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const async_1 = require("../readers/async"); -class AsyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._storage = []; - } - read(callback) { - this._reader.onError((error) => { - callFailureCallback(callback, error); - }); - this._reader.onEntry((entry) => { - this._storage.push(entry); - }); - this._reader.onEnd(() => { - callSuccessCallback(callback, this._storage); - }); - this._reader.read(); - } -} -exports.default = AsyncProvider; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, entries) { - callback(null, entries); -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts deleted file mode 100644 index 874f60c..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import AsyncProvider from './async'; -import StreamProvider from './stream'; -import SyncProvider from './sync'; -export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/@nodelib/fs.walk/out/providers/index.js deleted file mode 100644 index 4c2529c..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/index.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; -const async_1 = require("./async"); -exports.AsyncProvider = async_1.default; -const stream_1 = require("./stream"); -exports.StreamProvider = stream_1.default; -const sync_1 = require("./sync"); -exports.SyncProvider = sync_1.default; diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts deleted file mode 100644 index 294185f..0000000 --- 
a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -/// -import { Readable } from 'stream'; -import AsyncReader from '../readers/async'; -import type Settings from '../settings'; -export default class StreamProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: AsyncReader; - protected readonly _stream: Readable; - constructor(_root: string, _settings: Settings); - read(): Readable; -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/@nodelib/fs.walk/out/providers/stream.js deleted file mode 100644 index 51298b0..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const async_1 = require("../readers/async"); -class StreamProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._stream = new stream_1.Readable({ - objectMode: true, - read: () => { }, - destroy: () => { - if (!this._reader.isDestroyed) { - this._reader.destroy(); - } - } - }); - } - read() { - this._reader.onError((error) => { - this._stream.emit('error', error); - }); - this._reader.onEntry((entry) => { - this._stream.push(entry); - }); - this._reader.onEnd(() => { - this._stream.push(null); - }); - this._reader.read(); - return this._stream; - } -} -exports.default = StreamProvider; diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts deleted file mode 100644 index 551c42e..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import SyncReader from '../readers/sync'; -import type Settings from '../settings'; -import type { Entry } from '../types'; -export default class SyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: SyncReader; - constructor(_root: string, _settings: Settings); - read(): Entry[]; -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/@nodelib/fs.walk/out/providers/sync.js deleted file mode 100644 index faab6ca..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.js +++ /dev/null @@ -1,14 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const sync_1 = require("../readers/sync"); -class SyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new sync_1.default(this._root, this._settings); - } - read() { - return this._reader.read(); - } -} -exports.default = SyncProvider; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts deleted file mode 100644 index 9acf4e6..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -/// -import { EventEmitter } from 'events'; -import * as fsScandir from '@nodelib/fs.scandir'; -import type Settings from '../settings'; -import type { Entry, Errno } from '../types'; -import Reader from './reader'; -declare type EntryEventCallback = (entry: Entry) => void; -declare type ErrorEventCallback = (error: Errno) => void; -declare type EndEventCallback = () => void; -export default class AsyncReader extends Reader { - protected readonly _settings: Settings; - protected readonly _scandir: 
typeof fsScandir.scandir; - protected readonly _emitter: EventEmitter; - private readonly _queue; - private _isFatalError; - private _isDestroyed; - constructor(_root: string, _settings: Settings); - read(): EventEmitter; - get isDestroyed(): boolean; - destroy(): void; - onEntry(callback: EntryEventCallback): void; - onError(callback: ErrorEventCallback): void; - onEnd(callback: EndEventCallback): void; - private _pushToQueue; - private _worker; - private _handleError; - private _handleEntry; - private _emitEntry; -} -export {}; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/@nodelib/fs.walk/out/readers/async.js deleted file mode 100644 index ebe8dd5..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.js +++ /dev/null @@ -1,97 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const events_1 = require("events"); -const fsScandir = require("@nodelib/fs.scandir"); -const fastq = require("fastq"); -const common = require("./common"); -const reader_1 = require("./reader"); -class AsyncReader extends reader_1.default { - constructor(_root, _settings) { - super(_root, _settings); - this._settings = _settings; - this._scandir = fsScandir.scandir; - this._emitter = new events_1.EventEmitter(); - this._queue = fastq(this._worker.bind(this), this._settings.concurrency); - this._isFatalError = false; - this._isDestroyed = false; - this._queue.drain = () => { - if (!this._isFatalError) { - this._emitter.emit('end'); - } - }; - } - read() { - this._isFatalError = false; - this._isDestroyed = false; - setImmediate(() => { - this._pushToQueue(this._root, this._settings.basePath); - }); - return this._emitter; - } - get isDestroyed() { - return this._isDestroyed; - } - destroy() { - if (this._isDestroyed) { - throw new Error('The reader is already destroyed'); - } - this._isDestroyed = true; - this._queue.killAndDrain(); - } - onEntry(callback) { - this._emitter.on('entry', callback); - } - onError(callback) { - this._emitter.once('error', callback); - } - onEnd(callback) { - this._emitter.once('end', callback); - } - _pushToQueue(directory, base) { - const queueItem = { directory, base }; - this._queue.push(queueItem, (error) => { - if (error !== null) { - this._handleError(error); - } - }); - } - _worker(item, done) { - this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { - if (error !== null) { - done(error, undefined); - return; - } - for (const entry of entries) { - this._handleEntry(entry, item.base); - } - done(null, undefined); - }); - } - _handleError(error) { - if (this._isDestroyed || !common.isFatalError(this._settings, error)) { - return; - } - this._isFatalError = true; - this._isDestroyed = true; - this._emitter.emit('error', error); - } - _handleEntry(entry, base) { - if (this._isDestroyed || this._isFatalError) { - return; - } - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._emitEntry(entry); - } - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); - } - } - _emitEntry(entry) { - this._emitter.emit('entry', entry); - } -} -exports.default = AsyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts deleted file mode 100644 index 5985f97..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { FilterFunction } from '../settings'; -import type Settings from '../settings'; -import type { Errno } from '../types'; -export declare function isFatalError(settings: Settings, error: Errno): boolean; -export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; -export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; -export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/@nodelib/fs.walk/out/readers/common.js deleted file mode 100644 index a93572f..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; -function isFatalError(settings, error) { - if (settings.errorFilter === null) { - return true; - } - return !settings.errorFilter(error); -} -exports.isFatalError = isFatalError; -function isAppliedFilter(filter, value) { - return filter === null || filter(value); -} -exports.isAppliedFilter = isAppliedFilter; -function replacePathSegmentSeparator(filepath, separator) { - return filepath.split(/[/\\]/).join(separator); -} -exports.replacePathSegmentSeparator = replacePathSegmentSeparator; -function joinPathSegments(a, b, separator) { - if (a === '') { - return b; - } - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
- */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts deleted file mode 100644 index e1f383b..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type Settings from '../settings'; -export default class Reader { - protected readonly _root: string; - protected readonly _settings: Settings; - constructor(_root: string, _settings: Settings); -} diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/@nodelib/fs.walk/out/readers/reader.js deleted file mode 100644 index 782f07c..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const common = require("./common"); -class Reader { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); - } -} -exports.default = Reader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts deleted file mode 100644 index af41033..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import type { Entry } from '../types'; -import Reader from './reader'; -export default class SyncReader extends Reader { - protected readonly _scandir: typeof fsScandir.scandirSync; - private readonly _storage; - private readonly _queue; - read(): Entry[]; - private _pushToQueue; - private _handleQueue; - private _handleDirectory; - private _handleError; - private _handleEntry; - private _pushToStorage; -} diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/@nodelib/fs.walk/out/readers/sync.js deleted file mode 100644 index 9a8d5a6..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsScandir = require("@nodelib/fs.scandir"); -const common = require("./common"); -const reader_1 = require("./reader"); -class SyncReader extends reader_1.default { - constructor() { - super(...arguments); - this._scandir = fsScandir.scandirSync; - this._storage = []; - this._queue = new Set(); - } - read() { - this._pushToQueue(this._root, this._settings.basePath); - this._handleQueue(); - return this._storage; - } - _pushToQueue(directory, base) { - this._queue.add({ directory, base }); - } - _handleQueue() { - for (const item of this._queue.values()) { - this._handleDirectory(item.directory, item.base); - } - } - _handleDirectory(directory, base) { - try { - const entries = this._scandir(directory, this._settings.fsScandirSettings); - for (const entry of entries) { - this._handleEntry(entry, base); - } - } - catch (error) { - this._handleError(error); - } - } - _handleError(error) { - if (!common.isFatalError(this._settings, error)) { - return; - } - throw error; - } - _handleEntry(entry, base) { - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._pushToStorage(entry); - } - if 
(entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); - } - } - _pushToStorage(entry) { - this._storage.push(entry); - } -} -exports.default = SyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/@nodelib/fs.walk/out/settings.d.ts deleted file mode 100644 index d1c4b45..0000000 --- a/node_modules/@nodelib/fs.walk/out/settings.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import type { Entry, Errno } from './types'; -export declare type FilterFunction = (value: T) => boolean; -export declare type DeepFilterFunction = FilterFunction; -export declare type EntryFilterFunction = FilterFunction; -export declare type ErrorFilterFunction = FilterFunction; -export interface Options { - basePath?: string; - concurrency?: number; - deepFilter?: DeepFilterFunction; - entryFilter?: EntryFilterFunction; - errorFilter?: ErrorFilterFunction; - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly basePath?: string; - readonly concurrency: number; - readonly deepFilter: DeepFilterFunction | null; - readonly entryFilter: EntryFilterFunction | null; - readonly errorFilter: ErrorFilterFunction | null; - readonly pathSegmentSeparator: string; - readonly fsScandirSettings: fsScandir.Settings; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/@nodelib/fs.walk/out/settings.js deleted file mode 100644 index d7a85c8..0000000 --- a/node_modules/@nodelib/fs.walk/out/settings.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsScandir = require("@nodelib/fs.scandir"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.basePath = this._getValue(this._options.basePath, undefined); - this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); - this.deepFilter = this._getValue(this._options.deepFilter, null); - this.entryFilter = this._getValue(this._options.entryFilter, null); - this.errorFilter = this._getValue(this._options.errorFilter, null); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.fsScandirSettings = new fsScandir.Settings({ - followSymbolicLinks: this._options.followSymbolicLinks, - fs: this._options.fs, - pathSegmentSeparator: this._options.pathSegmentSeparator, - stats: this._options.stats, - throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? 
option : value; - } -} -exports.default = Settings; diff --git a/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/@nodelib/fs.walk/out/types/index.d.ts deleted file mode 100644 index 6ee9bd3..0000000 --- a/node_modules/@nodelib/fs.walk/out/types/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -/// -import type * as scandir from '@nodelib/fs.scandir'; -export declare type Entry = scandir.Entry; -export declare type Errno = NodeJS.ErrnoException; -export interface QueueItem { - directory: string; - base?: string; -} diff --git a/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/@nodelib/fs.walk/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/@nodelib/fs.walk/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.walk/package.json b/node_modules/@nodelib/fs.walk/package.json deleted file mode 100644 index 86bfce4..0000000 --- a/node_modules/@nodelib/fs.walk/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@nodelib/fs.walk", - "version": "1.2.8", - "description": "A library for efficiently walking a directory recursively", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "walk", - "scanner", - "crawler" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*", - "!out/**/tests/**" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . --watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4" - }, - "gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8" -} diff --git a/node_modules/@sindresorhus/merge-streams/index.d.ts b/node_modules/@sindresorhus/merge-streams/index.d.ts deleted file mode 100644 index bace371..0000000 --- a/node_modules/@sindresorhus/merge-streams/index.d.ts +++ /dev/null @@ -1,44 +0,0 @@ -import {type Readable} from 'node:stream'; - -/** -Merges an array of [readable streams](https://nodejs.org/api/stream.html#readable-streams) and returns a new readable stream that emits data from the individual streams as it arrives. - -If you provide an empty array, it returns an already-ended stream. - -@example -``` -import mergeStreams from '@sindresorhus/merge-streams'; - -const stream = mergeStreams([streamA, streamB]); - -for await (const chunk of stream) { - console.log(chunk); - //=> 'A1' - //=> 'B1' - //=> 'A2' - //=> 'B2' -} -``` -*/ -export default function mergeStreams(streams: Readable[]): MergedStream; - -/** -A single stream combining the output of multiple streams. -*/ -export class MergedStream extends Readable { - /** - Pipe a new readable stream. - - Throws if `MergedStream` has already ended. - */ - add(stream: Readable): void; - - /** - Unpipe a stream previously added using either `mergeStreams(streams)` or `MergedStream.add(stream)`. - - Returns `false` if the stream was not previously added, or if it was already removed by `MergedStream.remove(stream)`. 
- - The removed stream is not automatically ended. - */ - remove(stream: Readable): boolean; -} diff --git a/node_modules/@sindresorhus/merge-streams/index.js b/node_modules/@sindresorhus/merge-streams/index.js deleted file mode 100644 index f44828e..0000000 --- a/node_modules/@sindresorhus/merge-streams/index.js +++ /dev/null @@ -1,223 +0,0 @@ -import {on, once} from 'node:events'; -import {PassThrough as PassThroughStream} from 'node:stream'; -import {finished} from 'node:stream/promises'; - -export default function mergeStreams(streams) { - if (!Array.isArray(streams)) { - throw new TypeError(`Expected an array, got \`${typeof streams}\`.`); - } - - for (const stream of streams) { - validateStream(stream); - } - - const objectMode = streams.some(({readableObjectMode}) => readableObjectMode); - const highWaterMark = getHighWaterMark(streams, objectMode); - const passThroughStream = new MergedStream({ - objectMode, - writableHighWaterMark: highWaterMark, - readableHighWaterMark: highWaterMark, - }); - - for (const stream of streams) { - passThroughStream.add(stream); - } - - if (streams.length === 0) { - endStream(passThroughStream); - } - - return passThroughStream; -} - -const getHighWaterMark = (streams, objectMode) => { - if (streams.length === 0) { - // @todo Use `node:stream` `getDefaultHighWaterMark(objectMode)` in next major release - return 16_384; - } - - const highWaterMarks = streams - .filter(({readableObjectMode}) => readableObjectMode === objectMode) - .map(({readableHighWaterMark}) => readableHighWaterMark); - return Math.max(...highWaterMarks); -}; - -class MergedStream extends PassThroughStream { - #streams = new Set([]); - #ended = new Set([]); - #aborted = new Set([]); - #onFinished; - - add(stream) { - validateStream(stream); - - if (this.#streams.has(stream)) { - return; - } - - this.#streams.add(stream); - - this.#onFinished ??= onMergedStreamFinished(this, this.#streams); - endWhenStreamsDone({ - passThroughStream: this, - stream, - streams: this.#streams, - ended: this.#ended, - aborted: this.#aborted, - onFinished: this.#onFinished, - }); - - stream.pipe(this, {end: false}); - } - - remove(stream) { - validateStream(stream); - - if (!this.#streams.has(stream)) { - return false; - } - - stream.unpipe(this); - return true; - } -} - -const onMergedStreamFinished = async (passThroughStream, streams) => { - updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_COUNT); - const controller = new AbortController(); - - try { - await Promise.race([ - onMergedStreamEnd(passThroughStream, controller), - onInputStreamsUnpipe(passThroughStream, streams, controller), - ]); - } finally { - controller.abort(); - updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_COUNT); - } -}; - -const onMergedStreamEnd = async (passThroughStream, {signal}) => { - await finished(passThroughStream, {signal, cleanup: true}); -}; - -const onInputStreamsUnpipe = async (passThroughStream, streams, {signal}) => { - for await (const [unpipedStream] of on(passThroughStream, 'unpipe', {signal})) { - if (streams.has(unpipedStream)) { - unpipedStream.emit(unpipeEvent); - } - } -}; - -const validateStream = stream => { - if (typeof stream?.pipe !== 'function') { - throw new TypeError(`Expected a readable stream, got: \`${typeof stream}\`.`); - } -}; - -const endWhenStreamsDone = async ({passThroughStream, stream, streams, ended, aborted, onFinished}) => { - updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_PER_STREAM); - const controller = new AbortController(); - - try { - await 
Promise.race([ - afterMergedStreamFinished(onFinished, stream), - onInputStreamEnd({passThroughStream, stream, streams, ended, aborted, controller}), - onInputStreamUnpipe({stream, streams, ended, aborted, controller}), - ]); - } finally { - controller.abort(); - updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_PER_STREAM); - } - - if (streams.size === ended.size + aborted.size) { - if (ended.size === 0 && aborted.size > 0) { - abortStream(passThroughStream); - } else { - endStream(passThroughStream); - } - } -}; - -// This is the error thrown by `finished()` on `stream.destroy()` -const isAbortError = error => error?.code === 'ERR_STREAM_PREMATURE_CLOSE'; - -const afterMergedStreamFinished = async (onFinished, stream) => { - try { - await onFinished; - abortStream(stream); - } catch (error) { - if (isAbortError(error)) { - abortStream(stream); - } else { - errorStream(stream, error); - } - } -}; - -const onInputStreamEnd = async ({passThroughStream, stream, streams, ended, aborted, controller: {signal}}) => { - try { - await finished(stream, {signal, cleanup: true, readable: true, writable: false}); - if (streams.has(stream)) { - ended.add(stream); - } - } catch (error) { - if (signal.aborted || !streams.has(stream)) { - return; - } - - if (isAbortError(error)) { - aborted.add(stream); - } else { - errorStream(passThroughStream, error); - } - } -}; - -const onInputStreamUnpipe = async ({stream, streams, ended, aborted, controller: {signal}}) => { - await once(stream, unpipeEvent, {signal}); - streams.delete(stream); - ended.delete(stream); - aborted.delete(stream); -}; - -const unpipeEvent = Symbol('unpipe'); - -const endStream = stream => { - if (stream.writable) { - stream.end(); - } -}; - -const abortStream = stream => { - if (stream.readable || stream.writable) { - stream.destroy(); - } -}; - -// `stream.destroy(error)` crashes the process with `uncaughtException` if no `error` event listener exists on `stream`. -// We take care of error handling on user behalf, so we do not want this to happen. 
-const errorStream = (stream, error) => { - if (!stream.destroyed) { - stream.once('error', noop); - stream.destroy(error); - } -}; - -const noop = () => {}; - -const updateMaxListeners = (passThroughStream, increment) => { - const maxListeners = passThroughStream.getMaxListeners(); - if (maxListeners !== 0 && maxListeners !== Number.POSITIVE_INFINITY) { - passThroughStream.setMaxListeners(maxListeners + increment); - } -}; - -// Number of times `passThroughStream.on()` is called regardless of streams: -// - once due to `finished(passThroughStream)` -// - once due to `on(passThroughStream)` -const PASSTHROUGH_LISTENERS_COUNT = 2; - -// Number of times `passThroughStream.on()` is called per stream: -// - once due to `stream.pipe(passThroughStream)` -const PASSTHROUGH_LISTENERS_PER_STREAM = 1; diff --git a/node_modules/@sindresorhus/merge-streams/license b/node_modules/@sindresorhus/merge-streams/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/@sindresorhus/merge-streams/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/@sindresorhus/merge-streams/package.json b/node_modules/@sindresorhus/merge-streams/package.json deleted file mode 100644 index 94f4bdb..0000000 --- a/node_modules/@sindresorhus/merge-streams/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "@sindresorhus/merge-streams", - "version": "2.3.0", - "description": "Merge multiple streams into a unified stream", - "license": "MIT", - "repository": "sindresorhus/merge-streams", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "type": "module", - "exports": { - "types": "./index.d.ts", - "default": "./index.js" - }, - "sideEffects": false, - "engines": { - "node": ">=18" - }, - "scripts": { - "test": "xo && c8 ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "merge", - "stream", - "streams", - "readable", - "passthrough", - "interleave", - "interleaved", - "unify", - "unified" - ], - "devDependencies": { - "@types/node": "^20.8.9", - "ava": "^6.1.0", - "c8": "^9.1.0", - "tempfile": "^5.0.0", - "tsd": "^0.30.4", - "typescript": "^5.2.2", - "xo": "^0.56.0" - } -} diff --git a/node_modules/@sindresorhus/merge-streams/readme.md b/node_modules/@sindresorhus/merge-streams/readme.md deleted file mode 100644 index 647c43e..0000000 --- a/node_modules/@sindresorhus/merge-streams/readme.md +++ /dev/null @@ -1,53 +0,0 @@ -# merge-streams - -> Merge multiple streams into a unified stream - -## Install - -```sh -npm install @sindresorhus/merge-streams -``` - -## Usage - -```js -import mergeStreams from '@sindresorhus/merge-streams'; - -const stream = mergeStreams([streamA, streamB]); - -for await (const chunk of stream) { - console.log(chunk); - //=> 'A1' - //=> 'B1' - //=> 'A2' - //=> 'B2' -} -``` - -## API - -### `mergeStreams(streams: stream.Readable[]): MergedStream` - -Merges an array of [readable streams](https://nodejs.org/api/stream.html#readable-streams) and returns a new readable stream that emits data from the individual streams as it arrives. - -If you provide an empty array, it returns an already-ended stream. - -#### `MergedStream` - -_Type_: `stream.Readable` - -A single stream combining the output of multiple streams. - -##### `MergedStream.add(stream: stream.Readable): void` - -Pipe a new readable stream. - -Throws if `MergedStream` has already ended. - -##### `MergedStream.remove(stream: stream.Readable): boolean` - -Unpipe a stream previously added using either [`mergeStreams(streams)`](#mergestreamsstreams-streamreadable-mergedstream) or [`MergedStream.add(stream)`](#mergedstreamaddstream-streamreadable-void). - -Returns `false` if the stream was not previously added, or if it was already removed by `MergedStream.remove(stream)`. - -The removed stream is not automatically ended. diff --git a/node_modules/ansi-regex/index.d.ts b/node_modules/ansi-regex/index.d.ts deleted file mode 100644 index 2dbf6af..0000000 --- a/node_modules/ansi-regex/index.d.ts +++ /dev/null @@ -1,37 +0,0 @@ -declare namespace ansiRegex { - interface Options { - /** - Match only the first ANSI escape. - - @default false - */ - onlyFirst: boolean; - } -} - -/** -Regular expression for matching ANSI escape codes. 
- -@example -``` -import ansiRegex = require('ansi-regex'); - -ansiRegex().test('\u001B[4mcake\u001B[0m'); -//=> true - -ansiRegex().test('cake'); -//=> false - -'\u001B[4mcake\u001B[0m'.match(ansiRegex()); -//=> ['\u001B[4m', '\u001B[0m'] - -'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true})); -//=> ['\u001B[4m'] - -'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex()); -//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007'] -``` -*/ -declare function ansiRegex(options?: ansiRegex.Options): RegExp; - -export = ansiRegex; diff --git a/node_modules/ansi-regex/index.js b/node_modules/ansi-regex/index.js deleted file mode 100644 index 616ff83..0000000 --- a/node_modules/ansi-regex/index.js +++ /dev/null @@ -1,10 +0,0 @@ -'use strict'; - -module.exports = ({onlyFirst = false} = {}) => { - const pattern = [ - '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', - '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' - ].join('|'); - - return new RegExp(pattern, onlyFirst ? undefined : 'g'); -}; diff --git a/node_modules/ansi-regex/license b/node_modules/ansi-regex/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/ansi-regex/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/ansi-regex/package.json b/node_modules/ansi-regex/package.json deleted file mode 100644 index 017f531..0000000 --- a/node_modules/ansi-regex/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "ansi-regex", - "version": "5.0.1", - "description": "Regular expression for matching ANSI escape codes", - "license": "MIT", - "repository": "chalk/ansi-regex", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd", - "view-supported": "node fixtures/view-codes.js" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "ansi", - "styles", - "color", - "colour", - "colors", - "terminal", - "console", - "cli", - "string", - "tty", - "escape", - "formatting", - "rgb", - "256", - "shell", - "xterm", - "command-line", - "text", - "regex", - "regexp", - "re", - "match", - "test", - "find", - "pattern" - ], - "devDependencies": { - "ava": "^2.4.0", - "tsd": "^0.9.0", - "xo": "^0.25.3" - } -} diff --git a/node_modules/ansi-regex/readme.md b/node_modules/ansi-regex/readme.md deleted file mode 100644 index 4d848bc..0000000 --- a/node_modules/ansi-regex/readme.md +++ /dev/null @@ -1,78 +0,0 @@ -# ansi-regex - -> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) - - -## Install - -``` -$ npm install ansi-regex -``` - - -## Usage - -```js -const ansiRegex = require('ansi-regex'); - -ansiRegex().test('\u001B[4mcake\u001B[0m'); -//=> true - -ansiRegex().test('cake'); -//=> false - -'\u001B[4mcake\u001B[0m'.match(ansiRegex()); -//=> ['\u001B[4m', '\u001B[0m'] - -'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true})); -//=> ['\u001B[4m'] - -'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex()); -//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007'] -``` - - -## API - -### ansiRegex(options?) - -Returns a regex for matching ANSI escape codes. - -#### options - -Type: `object` - -##### onlyFirst - -Type: `boolean`
-Default: `false` *(Matches any ANSI escape codes in a string)* - -Match only the first ANSI escape. - - -## FAQ - -### Why do you test for codes not in the ECMA 48 standard? - -Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. We test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. - -On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. - - -## Maintainers - -- [Sindre Sorhus](https://github.com/sindresorhus) -- [Josh Junon](https://github.com/qix-) - - ---- - -
- - Get professional support for this package with a Tidelift subscription - -
- - Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. -
-
diff --git a/node_modules/ansi-styles/index.d.ts b/node_modules/ansi-styles/index.d.ts deleted file mode 100644 index 44a907e..0000000 --- a/node_modules/ansi-styles/index.d.ts +++ /dev/null @@ -1,345 +0,0 @@ -declare type CSSColor = - | 'aliceblue' - | 'antiquewhite' - | 'aqua' - | 'aquamarine' - | 'azure' - | 'beige' - | 'bisque' - | 'black' - | 'blanchedalmond' - | 'blue' - | 'blueviolet' - | 'brown' - | 'burlywood' - | 'cadetblue' - | 'chartreuse' - | 'chocolate' - | 'coral' - | 'cornflowerblue' - | 'cornsilk' - | 'crimson' - | 'cyan' - | 'darkblue' - | 'darkcyan' - | 'darkgoldenrod' - | 'darkgray' - | 'darkgreen' - | 'darkgrey' - | 'darkkhaki' - | 'darkmagenta' - | 'darkolivegreen' - | 'darkorange' - | 'darkorchid' - | 'darkred' - | 'darksalmon' - | 'darkseagreen' - | 'darkslateblue' - | 'darkslategray' - | 'darkslategrey' - | 'darkturquoise' - | 'darkviolet' - | 'deeppink' - | 'deepskyblue' - | 'dimgray' - | 'dimgrey' - | 'dodgerblue' - | 'firebrick' - | 'floralwhite' - | 'forestgreen' - | 'fuchsia' - | 'gainsboro' - | 'ghostwhite' - | 'gold' - | 'goldenrod' - | 'gray' - | 'green' - | 'greenyellow' - | 'grey' - | 'honeydew' - | 'hotpink' - | 'indianred' - | 'indigo' - | 'ivory' - | 'khaki' - | 'lavender' - | 'lavenderblush' - | 'lawngreen' - | 'lemonchiffon' - | 'lightblue' - | 'lightcoral' - | 'lightcyan' - | 'lightgoldenrodyellow' - | 'lightgray' - | 'lightgreen' - | 'lightgrey' - | 'lightpink' - | 'lightsalmon' - | 'lightseagreen' - | 'lightskyblue' - | 'lightslategray' - | 'lightslategrey' - | 'lightsteelblue' - | 'lightyellow' - | 'lime' - | 'limegreen' - | 'linen' - | 'magenta' - | 'maroon' - | 'mediumaquamarine' - | 'mediumblue' - | 'mediumorchid' - | 'mediumpurple' - | 'mediumseagreen' - | 'mediumslateblue' - | 'mediumspringgreen' - | 'mediumturquoise' - | 'mediumvioletred' - | 'midnightblue' - | 'mintcream' - | 'mistyrose' - | 'moccasin' - | 'navajowhite' - | 'navy' - | 'oldlace' - | 'olive' - | 'olivedrab' - | 'orange' - | 'orangered' - | 'orchid' - | 'palegoldenrod' - | 'palegreen' - | 'paleturquoise' - | 'palevioletred' - | 'papayawhip' - | 'peachpuff' - | 'peru' - | 'pink' - | 'plum' - | 'powderblue' - | 'purple' - | 'rebeccapurple' - | 'red' - | 'rosybrown' - | 'royalblue' - | 'saddlebrown' - | 'salmon' - | 'sandybrown' - | 'seagreen' - | 'seashell' - | 'sienna' - | 'silver' - | 'skyblue' - | 'slateblue' - | 'slategray' - | 'slategrey' - | 'snow' - | 'springgreen' - | 'steelblue' - | 'tan' - | 'teal' - | 'thistle' - | 'tomato' - | 'turquoise' - | 'violet' - | 'wheat' - | 'white' - | 'whitesmoke' - | 'yellow' - | 'yellowgreen'; - -declare namespace ansiStyles { - interface ColorConvert { - /** - The RGB color space. - - @param red - (`0`-`255`) - @param green - (`0`-`255`) - @param blue - (`0`-`255`) - */ - rgb(red: number, green: number, blue: number): string; - - /** - The RGB HEX color space. - - @param hex - A hexadecimal string containing RGB data. - */ - hex(hex: string): string; - - /** - @param keyword - A CSS color name. - */ - keyword(keyword: CSSColor): string; - - /** - The HSL color space. - - @param hue - (`0`-`360`) - @param saturation - (`0`-`100`) - @param lightness - (`0`-`100`) - */ - hsl(hue: number, saturation: number, lightness: number): string; - - /** - The HSV color space. - - @param hue - (`0`-`360`) - @param saturation - (`0`-`100`) - @param value - (`0`-`100`) - */ - hsv(hue: number, saturation: number, value: number): string; - - /** - The HSV color space. 
- - @param hue - (`0`-`360`) - @param whiteness - (`0`-`100`) - @param blackness - (`0`-`100`) - */ - hwb(hue: number, whiteness: number, blackness: number): string; - - /** - Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color. - */ - ansi(ansi: number): string; - - /** - Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color. - */ - ansi256(ansi: number): string; - } - - interface CSPair { - /** - The ANSI terminal control sequence for starting this style. - */ - readonly open: string; - - /** - The ANSI terminal control sequence for ending this style. - */ - readonly close: string; - } - - interface ColorBase { - readonly ansi: ColorConvert; - readonly ansi256: ColorConvert; - readonly ansi16m: ColorConvert; - - /** - The ANSI terminal control sequence for ending this color. - */ - readonly close: string; - } - - interface Modifier { - /** - Resets the current color chain. - */ - readonly reset: CSPair; - - /** - Make text bold. - */ - readonly bold: CSPair; - - /** - Emitting only a small amount of light. - */ - readonly dim: CSPair; - - /** - Make text italic. (Not widely supported) - */ - readonly italic: CSPair; - - /** - Make text underline. (Not widely supported) - */ - readonly underline: CSPair; - - /** - Inverse background and foreground colors. - */ - readonly inverse: CSPair; - - /** - Prints the text, but makes it invisible. - */ - readonly hidden: CSPair; - - /** - Puts a horizontal line through the center of the text. (Not widely supported) - */ - readonly strikethrough: CSPair; - } - - interface ForegroundColor { - readonly black: CSPair; - readonly red: CSPair; - readonly green: CSPair; - readonly yellow: CSPair; - readonly blue: CSPair; - readonly cyan: CSPair; - readonly magenta: CSPair; - readonly white: CSPair; - - /** - Alias for `blackBright`. - */ - readonly gray: CSPair; - - /** - Alias for `blackBright`. - */ - readonly grey: CSPair; - - readonly blackBright: CSPair; - readonly redBright: CSPair; - readonly greenBright: CSPair; - readonly yellowBright: CSPair; - readonly blueBright: CSPair; - readonly cyanBright: CSPair; - readonly magentaBright: CSPair; - readonly whiteBright: CSPair; - } - - interface BackgroundColor { - readonly bgBlack: CSPair; - readonly bgRed: CSPair; - readonly bgGreen: CSPair; - readonly bgYellow: CSPair; - readonly bgBlue: CSPair; - readonly bgCyan: CSPair; - readonly bgMagenta: CSPair; - readonly bgWhite: CSPair; - - /** - Alias for `bgBlackBright`. - */ - readonly bgGray: CSPair; - - /** - Alias for `bgBlackBright`. 
- */ - readonly bgGrey: CSPair; - - readonly bgBlackBright: CSPair; - readonly bgRedBright: CSPair; - readonly bgGreenBright: CSPair; - readonly bgYellowBright: CSPair; - readonly bgBlueBright: CSPair; - readonly bgCyanBright: CSPair; - readonly bgMagentaBright: CSPair; - readonly bgWhiteBright: CSPair; - } -} - -declare const ansiStyles: { - readonly modifier: ansiStyles.Modifier; - readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase; - readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase; - readonly codes: ReadonlyMap; -} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier; - -export = ansiStyles; diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js deleted file mode 100644 index 5d82581..0000000 --- a/node_modules/ansi-styles/index.js +++ /dev/null @@ -1,163 +0,0 @@ -'use strict'; - -const wrapAnsi16 = (fn, offset) => (...args) => { - const code = fn(...args); - return `\u001B[${code + offset}m`; -}; - -const wrapAnsi256 = (fn, offset) => (...args) => { - const code = fn(...args); - return `\u001B[${38 + offset};5;${code}m`; -}; - -const wrapAnsi16m = (fn, offset) => (...args) => { - const rgb = fn(...args); - return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; -}; - -const ansi2ansi = n => n; -const rgb2rgb = (r, g, b) => [r, g, b]; - -const setLazyProperty = (object, property, get) => { - Object.defineProperty(object, property, { - get: () => { - const value = get(); - - Object.defineProperty(object, property, { - value, - enumerable: true, - configurable: true - }); - - return value; - }, - enumerable: true, - configurable: true - }); -}; - -/** @type {typeof import('color-convert')} */ -let colorConvert; -const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => { - if (colorConvert === undefined) { - colorConvert = require('color-convert'); - } - - const offset = isBackground ? 10 : 0; - const styles = {}; - - for (const [sourceSpace, suite] of Object.entries(colorConvert)) { - const name = sourceSpace === 'ansi16' ? 
'ansi' : sourceSpace; - if (sourceSpace === targetSpace) { - styles[name] = wrap(identity, offset); - } else if (typeof suite === 'object') { - styles[name] = wrap(suite[targetSpace], offset); - } - } - - return styles; -}; - -function assembleStyles() { - const codes = new Map(); - const styles = { - modifier: { - reset: [0, 0], - // 21 isn't widely supported and 22 does the same thing - bold: [1, 22], - dim: [2, 22], - italic: [3, 23], - underline: [4, 24], - inverse: [7, 27], - hidden: [8, 28], - strikethrough: [9, 29] - }, - color: { - black: [30, 39], - red: [31, 39], - green: [32, 39], - yellow: [33, 39], - blue: [34, 39], - magenta: [35, 39], - cyan: [36, 39], - white: [37, 39], - - // Bright color - blackBright: [90, 39], - redBright: [91, 39], - greenBright: [92, 39], - yellowBright: [93, 39], - blueBright: [94, 39], - magentaBright: [95, 39], - cyanBright: [96, 39], - whiteBright: [97, 39] - }, - bgColor: { - bgBlack: [40, 49], - bgRed: [41, 49], - bgGreen: [42, 49], - bgYellow: [43, 49], - bgBlue: [44, 49], - bgMagenta: [45, 49], - bgCyan: [46, 49], - bgWhite: [47, 49], - - // Bright color - bgBlackBright: [100, 49], - bgRedBright: [101, 49], - bgGreenBright: [102, 49], - bgYellowBright: [103, 49], - bgBlueBright: [104, 49], - bgMagentaBright: [105, 49], - bgCyanBright: [106, 49], - bgWhiteBright: [107, 49] - } - }; - - // Alias bright black as gray (and grey) - styles.color.gray = styles.color.blackBright; - styles.bgColor.bgGray = styles.bgColor.bgBlackBright; - styles.color.grey = styles.color.blackBright; - styles.bgColor.bgGrey = styles.bgColor.bgBlackBright; - - for (const [groupName, group] of Object.entries(styles)) { - for (const [styleName, style] of Object.entries(group)) { - styles[styleName] = { - open: `\u001B[${style[0]}m`, - close: `\u001B[${style[1]}m` - }; - - group[styleName] = styles[styleName]; - - codes.set(style[0], style[1]); - } - - Object.defineProperty(styles, groupName, { - value: group, - enumerable: false - }); - } - - Object.defineProperty(styles, 'codes', { - value: codes, - enumerable: false - }); - - styles.color.close = '\u001B[39m'; - styles.bgColor.close = '\u001B[49m'; - - setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false)); - setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false)); - setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false)); - setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true)); - setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true)); - setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true)); - - return styles; -} - -// Make the export immutable -Object.defineProperty(module, 'exports', { - enumerable: true, - get: assembleStyles -}); diff --git a/node_modules/ansi-styles/license b/node_modules/ansi-styles/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/ansi-styles/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 
permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json deleted file mode 100644 index 7539328..0000000 --- a/node_modules/ansi-styles/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "ansi-styles", - "version": "4.3.0", - "description": "ANSI escape codes for styling strings in the terminal", - "license": "MIT", - "repository": "chalk/ansi-styles", - "funding": "https://github.com/chalk/ansi-styles?sponsor=1", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd", - "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "ansi", - "styles", - "color", - "colour", - "colors", - "terminal", - "console", - "cli", - "string", - "tty", - "escape", - "formatting", - "rgb", - "256", - "shell", - "xterm", - "log", - "logging", - "command-line", - "text" - ], - "dependencies": { - "color-convert": "^2.0.1" - }, - "devDependencies": { - "@types/color-convert": "^1.9.0", - "ava": "^2.3.0", - "svg-term-cli": "^2.1.1", - "tsd": "^0.11.0", - "xo": "^0.25.3" - } -} diff --git a/node_modules/ansi-styles/readme.md b/node_modules/ansi-styles/readme.md deleted file mode 100644 index 24883de..0000000 --- a/node_modules/ansi-styles/readme.md +++ /dev/null @@ -1,152 +0,0 @@ -# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) - -> [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal - -You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. - - - -## Install - -``` -$ npm install ansi-styles -``` - -## Usage - -```js -const style = require('ansi-styles'); - -console.log(`${style.green.open}Hello world!${style.green.close}`); - - -// Color conversion between 16/256/truecolor -// NOTE: If conversion goes to 16 colors or 256 colors, the original color -// may be degraded to fit that color palette. This means terminals -// that do not support 16 million colors will best-match the -// original color. -console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); -console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); -console.log(style.color.ansi16m.hex('#abcdef') + 'Hello world!' + style.color.close); -``` - -## API - -Each style has an `open` and `close` property. 
- -## Styles - -### Modifiers - -- `reset` -- `bold` -- `dim` -- `italic` *(Not widely supported)* -- `underline` -- `inverse` -- `hidden` -- `strikethrough` *(Not widely supported)* - -### Colors - -- `black` -- `red` -- `green` -- `yellow` -- `blue` -- `magenta` -- `cyan` -- `white` -- `blackBright` (alias: `gray`, `grey`) -- `redBright` -- `greenBright` -- `yellowBright` -- `blueBright` -- `magentaBright` -- `cyanBright` -- `whiteBright` - -### Background colors - -- `bgBlack` -- `bgRed` -- `bgGreen` -- `bgYellow` -- `bgBlue` -- `bgMagenta` -- `bgCyan` -- `bgWhite` -- `bgBlackBright` (alias: `bgGray`, `bgGrey`) -- `bgRedBright` -- `bgGreenBright` -- `bgYellowBright` -- `bgBlueBright` -- `bgMagentaBright` -- `bgCyanBright` -- `bgWhiteBright` - -## Advanced usage - -By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. - -- `style.modifier` -- `style.color` -- `style.bgColor` - -###### Example - -```js -console.log(style.color.green.open); -``` - -Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. - -###### Example - -```js -console.log(style.codes.get(36)); -//=> 39 -``` - -## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) - -`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. - -The following color spaces from `color-convert` are supported: - -- `rgb` -- `hex` -- `keyword` -- `hsl` -- `hsv` -- `hwb` -- `ansi` -- `ansi256` - -To use these, call the associated conversion function with the intended output, for example: - -```js -style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code -style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code - -style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code -style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code - -style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code -style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code -``` - -## Related - -- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal - -## Maintainers - -- [Sindre Sorhus](https://github.com/sindresorhus) -- [Josh Junon](https://github.com/qix-) - -## For enterprise - -Available as part of the Tidelift Subscription. - -The maintainers of `ansi-styles` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/anymatch/LICENSE b/node_modules/anymatch/LICENSE deleted file mode 100644 index 491766c..0000000 --- a/node_modules/anymatch/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com) - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/anymatch/README.md b/node_modules/anymatch/README.md deleted file mode 100644 index 1dd67f5..0000000 --- a/node_modules/anymatch/README.md +++ /dev/null @@ -1,87 +0,0 @@ -anymatch [![Build Status](https://travis-ci.org/micromatch/anymatch.svg?branch=master)](https://travis-ci.org/micromatch/anymatch) [![Coverage Status](https://img.shields.io/coveralls/micromatch/anymatch.svg?branch=master)](https://coveralls.io/r/micromatch/anymatch?branch=master) -====== -Javascript module to match a string against a regular expression, glob, string, -or function that takes the string as an argument and returns a truthy or falsy -value. The matcher can also be an array of any or all of these. Useful for -allowing a very flexible user-defined config to define things like file paths. - -__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__ - - -Usage ------ -```sh -npm install anymatch -``` - -#### anymatch(matchers, testString, [returnIndex], [options]) -* __matchers__: (_Array|String|RegExp|Function_) -String to be directly matched, string with glob patterns, regular expression -test, function that takes the testString as an argument and returns a truthy -value if it should be matched, or an array of any number and mix of these types. -* __testString__: (_String|Array_) The string to test against the matchers. If -passed as an array, the first element of the array will be used as the -`testString` for non-function matchers, while the entire array will be applied -as the arguments for function matchers. -* __options__: (_Object_ [optional]_) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options. - * __returnIndex__: (_Boolean [optional]_) If true, return the array index of -the first matcher that that testString matched, or -1 if no match, instead of a -boolean result. 
-
-```js
-const anymatch = require('anymatch');
-
-const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ];
-
-anymatch(matchers, 'path/to/file.js'); // true
-anymatch(matchers, 'path/anyjs/baz.js'); // true
-anymatch(matchers, 'path/to/foo.js'); // true
-anymatch(matchers, 'path/to/bar.js'); // true
-anymatch(matchers, 'bar.js'); // false
-
-// returnIndex = true
-anymatch(matchers, 'foo.js', {returnIndex: true}); // 2
-anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1
-
-// using globs to match directories and their children
-anymatch('node_modules', 'node_modules'); // true
-anymatch('node_modules', 'node_modules/somelib/index.js'); // false
-anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true
-anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false
-anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true
-
-const matcher = anymatch(matchers);
-['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ]
-```
-
-#### anymatch(matchers)
-You can also pass in only your matcher(s) to get a curried function that has
-already been bound to the provided matching criteria. This can be used as an
-`Array#filter` callback.
-
-```js
-const matcher = anymatch(matchers);
-
-matcher('path/to/file.js'); // true
-matcher('path/anyjs/baz.js', true); // 1
-
-['foo.js', 'bar.js'].filter(matcher); // ['foo.js']
-```
-
-Changelog
----------
-[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases)
-
-- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only.
-- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch parity and aligns with Bash behavior. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information).
-- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch)
-for glob pattern matching. Issues with glob pattern matching should be
-reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues).
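-
-One behavior the examples above don't show is negation. The bundled `index.js` (later in this patch) treats string matchers prefixed with `!` as negated globs: a test string that matches any negated glob is rejected regardless of the other matchers. A minimal sketch, with illustrative file names:
-
-```js
-const anymatch = require('anymatch');
-
-// '!**/*.min.js' is a negated glob: paths matching it are rejected even
-// though they also match the positive '**/*.js' pattern.
-const matcher = anymatch(['**/*.js', '!**/*.min.js']);
-
-matcher('lib/index.js');       // true
-matcher('dist/bundle.min.js'); // false
-```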
- -License -------- -[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE) diff --git a/node_modules/anymatch/index.d.ts b/node_modules/anymatch/index.d.ts deleted file mode 100644 index 3ef7eaa..0000000 --- a/node_modules/anymatch/index.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -type AnymatchFn = (testString: string) => boolean; -type AnymatchPattern = string|RegExp|AnymatchFn; -type AnymatchMatcher = AnymatchPattern|AnymatchPattern[] -type AnymatchTester = { - (testString: string|any[], returnIndex: true): number; - (testString: string|any[]): boolean; -} - -type PicomatchOptions = {dot: boolean}; - -declare const anymatch: { - (matchers: AnymatchMatcher): AnymatchTester; - (matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester; - (matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number; - (matchers: AnymatchMatcher, testString: string|any[]): boolean; -} - -export {AnymatchMatcher as Matcher} -export {AnymatchTester as Tester} -export default anymatch diff --git a/node_modules/anymatch/index.js b/node_modules/anymatch/index.js deleted file mode 100644 index 8eb73e9..0000000 --- a/node_modules/anymatch/index.js +++ /dev/null @@ -1,104 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, "__esModule", { value: true }); - -const picomatch = require('picomatch'); -const normalizePath = require('normalize-path'); - -/** - * @typedef {(testString: string) => boolean} AnymatchFn - * @typedef {string|RegExp|AnymatchFn} AnymatchPattern - * @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher - */ -const BANG = '!'; -const DEFAULT_OPTIONS = {returnIndex: false}; -const arrify = (item) => Array.isArray(item) ? item : [item]; - -/** - * @param {AnymatchPattern} matcher - * @param {object} options - * @returns {AnymatchFn} - */ -const createPattern = (matcher, options) => { - if (typeof matcher === 'function') { - return matcher; - } - if (typeof matcher === 'string') { - const glob = picomatch(matcher, options); - return (string) => matcher === string || glob(string); - } - if (matcher instanceof RegExp) { - return (string) => matcher.test(string); - } - return (string) => false; -}; - -/** - * @param {Array} patterns - * @param {Array} negPatterns - * @param {String|Array} args - * @param {Boolean} returnIndex - * @returns {boolean|number} - */ -const matchPatterns = (patterns, negPatterns, args, returnIndex) => { - const isList = Array.isArray(args); - const _path = isList ? args[0] : args; - if (!isList && typeof _path !== 'string') { - throw new TypeError('anymatch: second argument must be a string: got ' + - Object.prototype.toString.call(_path)) - } - const path = normalizePath(_path, false); - - for (let index = 0; index < negPatterns.length; index++) { - const nglob = negPatterns[index]; - if (nglob(path)) { - return returnIndex ? -1 : false; - } - } - - const applied = isList && [path].concat(args.slice(1)); - for (let index = 0; index < patterns.length; index++) { - const pattern = patterns[index]; - if (isList ? pattern(...applied) : pattern(path)) { - return returnIndex ? index : true; - } - } - - return returnIndex ? 
-1 : false; -}; - -/** - * @param {AnymatchMatcher} matchers - * @param {Array|string} testString - * @param {object} options - * @returns {boolean|number|Function} - */ -const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => { - if (matchers == null) { - throw new TypeError('anymatch: specify first argument'); - } - const opts = typeof options === 'boolean' ? {returnIndex: options} : options; - const returnIndex = opts.returnIndex || false; - - // Early cache for matchers. - const mtchers = arrify(matchers); - const negatedGlobs = mtchers - .filter(item => typeof item === 'string' && item.charAt(0) === BANG) - .map(item => item.slice(1)) - .map(item => picomatch(item, opts)); - const patterns = mtchers - .filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG)) - .map(matcher => createPattern(matcher, opts)); - - if (testString == null) { - return (testString, ri = false) => { - const returnIndex = typeof ri === 'boolean' ? ri : false; - return matchPatterns(patterns, negatedGlobs, testString, returnIndex); - } - } - - return matchPatterns(patterns, negatedGlobs, testString, returnIndex); -}; - -anymatch.default = anymatch; -module.exports = anymatch; diff --git a/node_modules/anymatch/package.json b/node_modules/anymatch/package.json deleted file mode 100644 index 2cb2307..0000000 --- a/node_modules/anymatch/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "anymatch", - "version": "3.1.3", - "description": "Matches strings against configurable strings, globs, regular expressions, and/or functions", - "files": [ - "index.js", - "index.d.ts" - ], - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "author": { - "name": "Elan Shanker", - "url": "https://github.com/es128" - }, - "license": "ISC", - "homepage": "https://github.com/micromatch/anymatch", - "repository": { - "type": "git", - "url": "https://github.com/micromatch/anymatch" - }, - "keywords": [ - "match", - "any", - "string", - "file", - "fs", - "list", - "glob", - "regex", - "regexp", - "regular", - "expression", - "function" - ], - "scripts": { - "test": "nyc mocha", - "mocha": "mocha" - }, - "devDependencies": { - "mocha": "^6.1.3", - "nyc": "^14.0.0" - }, - "engines": { - "node": ">= 8" - } -} diff --git a/node_modules/binary-extensions/binary-extensions.json b/node_modules/binary-extensions/binary-extensions.json deleted file mode 100644 index ac08048..0000000 --- a/node_modules/binary-extensions/binary-extensions.json +++ /dev/null @@ -1,263 +0,0 @@ -[ - "3dm", - "3ds", - "3g2", - "3gp", - "7z", - "a", - "aac", - "adp", - "afdesign", - "afphoto", - "afpub", - "ai", - "aif", - "aiff", - "alz", - "ape", - "apk", - "appimage", - "ar", - "arj", - "asf", - "au", - "avi", - "bak", - "baml", - "bh", - "bin", - "bk", - "bmp", - "btif", - "bz2", - "bzip2", - "cab", - "caf", - "cgm", - "class", - "cmx", - "cpio", - "cr2", - "cur", - "dat", - "dcm", - "deb", - "dex", - "djvu", - "dll", - "dmg", - "dng", - "doc", - "docm", - "docx", - "dot", - "dotm", - "dra", - "DS_Store", - "dsk", - "dts", - "dtshd", - "dvb", - "dwg", - "dxf", - "ecelp4800", - "ecelp7470", - "ecelp9600", - "egg", - "eol", - "eot", - "epub", - "exe", - "f4v", - "fbs", - "fh", - "fla", - "flac", - "flatpak", - "fli", - "flv", - "fpx", - "fst", - "fvt", - "g3", - "gh", - "gif", - "graffle", - "gz", - "gzip", - "h261", - "h263", - "h264", - "icns", - "ico", - "ief", - "img", - "ipa", - "iso", - "jar", - "jpeg", - "jpg", - "jpgv", - "jpm", - "jxr", - "key", - "ktx", - "lha", - 
"lib", - "lvp", - "lz", - "lzh", - "lzma", - "lzo", - "m3u", - "m4a", - "m4v", - "mar", - "mdi", - "mht", - "mid", - "midi", - "mj2", - "mka", - "mkv", - "mmr", - "mng", - "mobi", - "mov", - "movie", - "mp3", - "mp4", - "mp4a", - "mpeg", - "mpg", - "mpga", - "mxu", - "nef", - "npx", - "numbers", - "nupkg", - "o", - "odp", - "ods", - "odt", - "oga", - "ogg", - "ogv", - "otf", - "ott", - "pages", - "pbm", - "pcx", - "pdb", - "pdf", - "pea", - "pgm", - "pic", - "png", - "pnm", - "pot", - "potm", - "potx", - "ppa", - "ppam", - "ppm", - "pps", - "ppsm", - "ppsx", - "ppt", - "pptm", - "pptx", - "psd", - "pya", - "pyc", - "pyo", - "pyv", - "qt", - "rar", - "ras", - "raw", - "resources", - "rgb", - "rip", - "rlc", - "rmf", - "rmvb", - "rpm", - "rtf", - "rz", - "s3m", - "s7z", - "scpt", - "sgi", - "shar", - "snap", - "sil", - "sketch", - "slk", - "smv", - "snk", - "so", - "stl", - "suo", - "sub", - "swf", - "tar", - "tbz", - "tbz2", - "tga", - "tgz", - "thmx", - "tif", - "tiff", - "tlz", - "ttc", - "ttf", - "txz", - "udf", - "uvh", - "uvi", - "uvm", - "uvp", - "uvs", - "uvu", - "viv", - "vob", - "war", - "wav", - "wax", - "wbmp", - "wdp", - "weba", - "webm", - "webp", - "whl", - "wim", - "wm", - "wma", - "wmv", - "wmx", - "woff", - "woff2", - "wrm", - "wvx", - "xbm", - "xif", - "xla", - "xlam", - "xls", - "xlsb", - "xlsm", - "xlsx", - "xlt", - "xltm", - "xltx", - "xm", - "xmind", - "xpi", - "xpm", - "xwd", - "xz", - "z", - "zip", - "zipx" -] diff --git a/node_modules/binary-extensions/binary-extensions.json.d.ts b/node_modules/binary-extensions/binary-extensions.json.d.ts deleted file mode 100644 index 94a248c..0000000 --- a/node_modules/binary-extensions/binary-extensions.json.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -declare const binaryExtensionsJson: readonly string[]; - -export = binaryExtensionsJson; diff --git a/node_modules/binary-extensions/index.d.ts b/node_modules/binary-extensions/index.d.ts deleted file mode 100644 index f469ac5..0000000 --- a/node_modules/binary-extensions/index.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/** -List of binary file extensions. - -@example -``` -import binaryExtensions = require('binary-extensions'); - -console.log(binaryExtensions); -//=> ['3ds', '3g2', …] -``` -*/ -declare const binaryExtensions: readonly string[]; - -export = binaryExtensions; diff --git a/node_modules/binary-extensions/index.js b/node_modules/binary-extensions/index.js deleted file mode 100644 index d46e468..0000000 --- a/node_modules/binary-extensions/index.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./binary-extensions.json'); diff --git a/node_modules/binary-extensions/license b/node_modules/binary-extensions/license deleted file mode 100644 index 5493a1a..0000000 --- a/node_modules/binary-extensions/license +++ /dev/null @@ -1,10 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) -Copyright (c) Paul Miller (https://paulmillr.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/binary-extensions/package.json b/node_modules/binary-extensions/package.json deleted file mode 100644 index 4710c33..0000000 --- a/node_modules/binary-extensions/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "binary-extensions", - "version": "2.3.0", - "description": "List of binary file extensions", - "license": "MIT", - "repository": "sindresorhus/binary-extensions", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "sideEffects": false, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts", - "binary-extensions.json", - "binary-extensions.json.d.ts" - ], - "keywords": [ - "binary", - "extensions", - "extension", - "file", - "json", - "list", - "array" - ], - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/binary-extensions/readme.md b/node_modules/binary-extensions/readme.md deleted file mode 100644 index 88519b3..0000000 --- a/node_modules/binary-extensions/readme.md +++ /dev/null @@ -1,25 +0,0 @@ -# binary-extensions - -> List of binary file extensions - -The list is just a [JSON file](binary-extensions.json) and can be used anywhere. - -## Install - -```sh -npm install binary-extensions -``` - -## Usage - -```js -const binaryExtensions = require('binary-extensions'); - -console.log(binaryExtensions); -//=> ['3ds', '3g2', …] -``` - -## Related - -- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file -- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions diff --git a/node_modules/braces/LICENSE b/node_modules/braces/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/braces/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/braces/README.md b/node_modules/braces/README.md deleted file mode 100644 index f59dd60..0000000 --- a/node_modules/braces/README.md +++ /dev/null @@ -1,586 +0,0 @@ -# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) - -> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save braces -``` - -## v3.0.0 Released!! - -See the [changelog](CHANGELOG.md) for details. - -## Why use braces? - -Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. - -- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) -- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. -- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. -- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). -- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). -- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` -- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` -- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` -- [Supports escaping](#escaping) - To prevent evaluation of special characters. - -## Usage - -The main export is a function that takes one or more brace `patterns` and `options`. - -```js -const braces = require('braces'); -// braces(patterns[, options]); - -console.log(braces(['{01..05}', '{a..e}'])); -//=> ['(0[1-5])', '([a-e])'] - -console.log(braces(['{01..05}', '{a..e}'], { expand: true })); -//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] -``` - -### Brace Expansion vs. Compilation - -By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. 
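-
-Since the compiled (non-expanded) form is a regex-ready source string rather than a list of paths, it is typically wrapped in a `RegExp` or handed to a glob matcher such as [micromatch](https://github.com/micromatch/micromatch). A minimal sketch of that idea (the `RegExp` wrapping is illustrative, not part of the braces API):
-
-```js
-const braces = require('braces');
-
-// Compile the brace pattern into a regex-compatible alternation...
-const [source] = braces('a/{b,c}/d'); //=> 'a/(b|c)/d'
-
-// ...and match against it instead of expanding every combination.
-const re = new RegExp(`^${source}$`);
-console.log(re.test('a/b/d')); //=> true
-console.log(re.test('a/x/d')); //=> false
-```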
- -**Compiled** - -```js -console.log(braces('a/{x,y,z}/b')); -//=> ['a/(x|y|z)/b'] -console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); -//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] -``` - -**Expanded** - -Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): - -```js -console.log(braces('a/{x,y,z}/b', { expand: true })); -//=> ['a/x/b', 'a/y/b', 'a/z/b'] - -console.log(braces.expand('{01..10}')); -//=> ['01','02','03','04','05','06','07','08','09','10'] -``` - -### Lists - -Expand lists (like Bash "sets"): - -```js -console.log(braces('a/{foo,bar,baz}/*.js')); -//=> ['a/(foo|bar|baz)/*.js'] - -console.log(braces.expand('a/{foo,bar,baz}/*.js')); -//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] -``` - -### Sequences - -Expand ranges of characters (like Bash "sequences"): - -```js -console.log(braces.expand('{1..3}')); // ['1', '2', '3'] -console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] -console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] -console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] - -// supports zero-padded ranges -console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] -console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] -``` - -See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. - -### Steppped ranges - -Steps, or increments, may be used with ranges: - -```js -console.log(braces.expand('{2..10..2}')); -//=> ['2', '4', '6', '8', '10'] - -console.log(braces('{2..10..2}')); -//=> ['(2|4|6|8|10)'] -``` - -When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. - -### Nesting - -Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. - -**"Expanded" braces** - -```js -console.log(braces.expand('a{b,c,/{x,y}}/e')); -//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] - -console.log(braces.expand('a/{x,{1..5},y}/c')); -//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] -``` - -**"Optimized" braces** - -```js -console.log(braces('a{b,c,/{x,y}}/e')); -//=> ['a(b|c|/(x|y))/e'] - -console.log(braces('a/{x,{1..5},y}/c')); -//=> ['a/(x|([1-5])|y)/c'] -``` - -### Escaping - -**Escaping braces** - -A brace pattern will not be expanded or evaluted if _either the opening or closing brace is escaped_: - -```js -console.log(braces.expand('a\\{d,c,b}e')); -//=> ['a{d,c,b}e'] - -console.log(braces.expand('a{d,c,b\\}e')); -//=> ['a{d,c,b}e'] -``` - -**Escaping commas** - -Commas inside braces may also be escaped: - -```js -console.log(braces.expand('a{b\\,c}d')); -//=> ['a{b,c}d'] - -console.log(braces.expand('a{d\\,c,b}e')); -//=> ['ad,ce', 'abe'] -``` - -**Single items** - -Following bash conventions, a brace pattern is also not expanded when it contains a single character: - -```js -console.log(braces.expand('a{b}c')); -//=> ['a{b}c'] -``` - -## Options - -### options.maxLength - -**Type**: `Number` - -**Default**: `10,000` - -**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. 
- -```js -console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error -``` - -### options.expand - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). - -```js -console.log(braces('a/{b,c}/d', { expand: true })); -//=> [ 'a/b/d', 'a/c/d' ] -``` - -### options.nodupes - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Remove duplicates from the returned array. - -### options.rangeLimit - -**Type**: `Number` - -**Default**: `1000` - -**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. - -You can customize `options.rangeLimit` or set it to `Inifinity` to disable this altogether. - -**Examples** - -```js -// pattern exceeds the "rangeLimit", so it's optimized automatically -console.log(braces.expand('{1..1000}')); -//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] - -// pattern does not exceed "rangeLimit", so it's NOT optimized -console.log(braces.expand('{1..100}')); -//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] -``` - -### options.transform - -**Type**: `Function` - -**Default**: `undefined` - -**Description**: Customize range expansion. - -**Example: Transforming non-numeric values** - -```js -const alpha = braces.expand('x/{a..e}/y', { - transform(value, index) { - // When non-numeric values are passed, "value" is a character code. - return 'foo/' + String.fromCharCode(value) + '-' + index; - }, -}); -console.log(alpha); -//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] -``` - -**Example: Transforming numeric values** - -```js -const numeric = braces.expand('{1..5}', { - transform(value) { - // when numeric values are passed, "value" is a number - return 'foo/' + value * 2; - }, -}); -console.log(numeric); -//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] -``` - -### options.quantifiers - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. - -Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) - -The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. 
- -**Examples** - -```js -const braces = require('braces'); -console.log(braces('a/b{1,3}/{x,y,z}')); -//=> [ 'a/b(1|3)/(x|y|z)' ] -console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); -//=> [ 'a/b{1,3}/(x|y|z)' ] -console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); -//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] -``` - -### options.keepEscaping - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Do not strip backslashes that were used for escaping from the result. - -## What is "brace expansion"? - -Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). - -In addition to "expansion", braces are also used for matching. In other words: - -- [brace expansion](#brace-expansion) is for generating new lists -- [brace matching](#brace-matching) is for filtering existing lists - -
-More about brace expansion (click to expand) - -There are two main types of brace expansion: - -1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` -2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". - -Here are some example brace patterns to illustrate how they work: - -**Sets** - -``` -{a,b,c} => a b c -{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 -``` - -**Sequences** - -``` -{1..9} => 1 2 3 4 5 6 7 8 9 -{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 -{1..20..3} => 1 4 7 10 13 16 19 -{a..j} => a b c d e f g h i j -{j..a} => j i h g f e d c b a -{a..z..3} => a d g j m p s v y -``` - -**Combination** - -Sets and sequences can be mixed together or used along with any other strings. - -``` -{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 -foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar -``` - -The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. - -## Brace matching - -In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. - -For example, the pattern `foo/{1..3}/bar` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -``` - -But not: - -``` -baz/1/qux -baz/2/qux -baz/3/qux -``` - -Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -baz/1/qux -baz/2/qux -baz/3/qux -``` - -## Brace matching pitfalls - -Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. - -### tldr - -**"brace bombs"** - -- brace expansion can eat up a huge amount of processing resources -- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially -- users can accidentally (or intentially) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) - -For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. - -### The solution - -Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. - -### Geometric complexity - -At minimum, brace patterns with sets limited to two elements have quadradic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. 
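-
-In general, a pattern built from `n` brace sets with `k` alternatives each expands to the Cartesian product of those sets, i.e. `k^n` strings, which is why resource usage grows exponentially even though the pattern itself only grows linearly.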
- -For example, the following sets demonstrate quadratic (`O(n^2)`) complexity: - -``` -{1,2}{3,4} => (2X2) => 13 14 23 24 -{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246 -``` - -But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity: - -``` -{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248 - 249 257 258 259 267 268 269 347 348 349 357 - 358 359 367 368 369 -``` - -Now, imagine how this complexity grows given that each element is a n-tuple: - -``` -{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB) -{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB) -``` - -Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. - -**More information** - -Interested in learning more about brace expansion? - -- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion) -- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion) -- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) - -
- -## Performance - -Braces is not only screaming fast, it's also more accurate the other brace expansion libraries. - -### Better algorithms - -Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_. - -Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently. - -**The proof is in the numbers** - -Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively. - -| **Pattern** | **braces** | **[minimatch][]** | -| --------------------------- | ------------------- | ---------------------------- | -| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) | -| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) | -| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) | -| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) | -| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) | -| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) | -| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) | -| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) | -| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) | -| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) | -| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) | -| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) | -| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) | -| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) | -| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) | -| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) | -| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) | - -### Faster algorithms - -When you need expansion, braces is still much faster. - -_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_ - -| **Pattern** | **braces** | **[minimatch][]** | -| --------------- | --------------------------- | ---------------------------- | -| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) | -| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) | -| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) | -| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) | -| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) | -| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) | -| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) | -| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) | - -If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js). - -## Benchmarks - -### Running benchmarks - -Install dev dependencies: - -```bash -npm i -d && npm benchmark -``` - -### Latest results - -Braces is more accurate, without sacrificing performance. 
- -```bash -● expand - range (expanded) - braces x 53,167 ops/sec ±0.12% (102 runs sampled) - minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) -● expand - range (optimized for regex) - braces x 373,442 ops/sec ±0.04% (100 runs sampled) - minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) -● expand - nested ranges (expanded) - braces x 33,921 ops/sec ±0.09% (99 runs sampled) - minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) -● expand - nested ranges (optimized for regex) - braces x 287,479 ops/sec ±0.52% (98 runs sampled) - minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) -● expand - set (expanded) - braces x 238,243 ops/sec ±0.19% (97 runs sampled) - minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) -● expand - set (optimized for regex) - braces x 321,844 ops/sec ±0.10% (97 runs sampled) - minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) -● expand - nested sets (expanded) - braces x 165,371 ops/sec ±0.42% (96 runs sampled) - minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) -● expand - nested sets (optimized for regex) - braces x 242,948 ops/sec ±0.12% (99 runs sampled) - minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) -``` - -## About - -
-### Contributing
-
-Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
-
-### Running Tests
-
-Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
-
-```sh
-$ npm install && npm test
-```
-
-### Building docs
-
-_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
-
-To generate the readme, run the following command:
-
-```sh
-$ npm install -g verbose/verb#dev verb-generate-readme && verb
-```
- -### Contributors - -| **Commits** | **Contributor** | -| ----------- | ------------------------------------------------------------- | -| 197 | [jonschlinkert](https://github.com/jonschlinkert) | -| 4 | [doowb](https://github.com/doowb) | -| 1 | [es128](https://github.com/es128) | -| 1 | [eush77](https://github.com/eush77) | -| 1 | [hemanth](https://github.com/hemanth) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | - -### Author - -**Jon Schlinkert** - -- [GitHub Profile](https://github.com/jonschlinkert) -- [Twitter Profile](https://twitter.com/jonschlinkert) -- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - ---- - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ diff --git a/node_modules/braces/index.js b/node_modules/braces/index.js deleted file mode 100644 index d222c13..0000000 --- a/node_modules/braces/index.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict'; - -const stringify = require('./lib/stringify'); -const compile = require('./lib/compile'); -const expand = require('./lib/expand'); -const parse = require('./lib/parse'); - -/** - * Expand the given pattern or create a regex-compatible string. - * - * ```js - * const braces = require('braces'); - * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] - * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {String} - * @api public - */ - -const braces = (input, options = {}) => { - let output = []; - - if (Array.isArray(input)) { - for (const pattern of input) { - const result = braces.create(pattern, options); - if (Array.isArray(result)) { - output.push(...result); - } else { - output.push(result); - } - } - } else { - output = [].concat(braces.create(input, options)); - } - - if (options && options.expand === true && options.nodupes === true) { - output = [...new Set(output)]; - } - return output; -}; - -/** - * Parse the given `str` with the given `options`. - * - * ```js - * // braces.parse(pattern, [, options]); - * const ast = braces.parse('a/{b,c}/d'); - * console.log(ast); - * ``` - * @param {String} pattern Brace pattern to parse - * @param {Object} options - * @return {Object} Returns an AST - * @api public - */ - -braces.parse = (input, options = {}) => parse(input, options); - -/** - * Creates a braces string from an AST, or an AST node. - * - * ```js - * const braces = require('braces'); - * let ast = braces.parse('foo/{a,b}/bar'); - * console.log(stringify(ast.nodes[2])); //=> '{a,b}' - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.stringify = (input, options = {}) => { - if (typeof input === 'string') { - return stringify(braces.parse(input, options), options); - } - return stringify(input, options); -}; - -/** - * Compiles a brace pattern into a regex-compatible, optimized string. - * This method is called by the main [braces](#braces) function by default. - * - * ```js - * const braces = require('braces'); - * console.log(braces.compile('a/{b,c}/d')); - * //=> ['a/(b|c)/d'] - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. 
- * @api public - */ - -braces.compile = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } - return compile(input, options); -}; - -/** - * Expands a brace pattern into an array. This method is called by the - * main [braces](#braces) function when `options.expand` is true. Before - * using this method it's recommended that you read the [performance notes](#performance)) - * and advantages of using [.compile](#compile) instead. - * - * ```js - * const braces = require('braces'); - * console.log(braces.expand('a/{b,c}/d')); - * //=> ['a/b/d', 'a/c/d']; - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.expand = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } - - let result = expand(input, options); - - // filter out empty strings if specified - if (options.noempty === true) { - result = result.filter(Boolean); - } - - // filter out duplicates if specified - if (options.nodupes === true) { - result = [...new Set(result)]; - } - - return result; -}; - -/** - * Processes a brace pattern and returns either an expanded array - * (if `options.expand` is true), a highly optimized regex-compatible string. - * This method is called by the main [braces](#braces) function. - * - * ```js - * const braces = require('braces'); - * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) - * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.create = (input, options = {}) => { - if (input === '' || input.length < 3) { - return [input]; - } - - return options.expand !== true - ? braces.compile(input, options) - : braces.expand(input, options); -}; - -/** - * Expose "braces" - */ - -module.exports = braces; diff --git a/node_modules/braces/lib/compile.js b/node_modules/braces/lib/compile.js deleted file mode 100644 index dce69be..0000000 --- a/node_modules/braces/lib/compile.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict'; - -const fill = require('fill-range'); -const utils = require('./utils'); - -const compile = (ast, options = {}) => { - const walk = (node, parent = {}) => { - const invalidBlock = utils.isInvalidBrace(parent); - const invalidNode = node.invalid === true && options.escapeInvalid === true; - const invalid = invalidBlock === true || invalidNode === true; - const prefix = options.escapeInvalid === true ? '\\' : ''; - let output = ''; - - if (node.isOpen === true) { - return prefix + node.value; - } - - if (node.isClose === true) { - console.log('node.isClose', prefix, node.value); - return prefix + node.value; - } - - if (node.type === 'open') { - return invalid ? prefix + node.value : '('; - } - - if (node.type === 'close') { - return invalid ? prefix + node.value : ')'; - } - - if (node.type === 'comma') { - return node.prev.type === 'comma' ? '' : invalid ? node.value : '|'; - } - - if (node.value) { - return node.value; - } - - if (node.nodes && node.ranges > 0) { - const args = utils.reduce(node.nodes); - const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); - - if (range.length !== 0) { - return args.length > 1 && range.length > 1 ? 
`(${range})` : range; - } - } - - if (node.nodes) { - for (const child of node.nodes) { - output += walk(child, node); - } - } - - return output; - }; - - return walk(ast); -}; - -module.exports = compile; diff --git a/node_modules/braces/lib/constants.js b/node_modules/braces/lib/constants.js deleted file mode 100644 index 2bb3b88..0000000 --- a/node_modules/braces/lib/constants.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict'; - -module.exports = { - MAX_LENGTH: 10000, - - // Digits - CHAR_0: '0', /* 0 */ - CHAR_9: '9', /* 9 */ - - // Alphabet chars. - CHAR_UPPERCASE_A: 'A', /* A */ - CHAR_LOWERCASE_A: 'a', /* a */ - CHAR_UPPERCASE_Z: 'Z', /* Z */ - CHAR_LOWERCASE_Z: 'z', /* z */ - - CHAR_LEFT_PARENTHESES: '(', /* ( */ - CHAR_RIGHT_PARENTHESES: ')', /* ) */ - - CHAR_ASTERISK: '*', /* * */ - - // Non-alphabetic chars. - CHAR_AMPERSAND: '&', /* & */ - CHAR_AT: '@', /* @ */ - CHAR_BACKSLASH: '\\', /* \ */ - CHAR_BACKTICK: '`', /* ` */ - CHAR_CARRIAGE_RETURN: '\r', /* \r */ - CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ - CHAR_COLON: ':', /* : */ - CHAR_COMMA: ',', /* , */ - CHAR_DOLLAR: '$', /* . */ - CHAR_DOT: '.', /* . */ - CHAR_DOUBLE_QUOTE: '"', /* " */ - CHAR_EQUAL: '=', /* = */ - CHAR_EXCLAMATION_MARK: '!', /* ! */ - CHAR_FORM_FEED: '\f', /* \f */ - CHAR_FORWARD_SLASH: '/', /* / */ - CHAR_HASH: '#', /* # */ - CHAR_HYPHEN_MINUS: '-', /* - */ - CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ - CHAR_LEFT_CURLY_BRACE: '{', /* { */ - CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ - CHAR_LINE_FEED: '\n', /* \n */ - CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ - CHAR_PERCENT: '%', /* % */ - CHAR_PLUS: '+', /* + */ - CHAR_QUESTION_MARK: '?', /* ? */ - CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ - CHAR_RIGHT_CURLY_BRACE: '}', /* } */ - CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ - CHAR_SEMICOLON: ';', /* ; */ - CHAR_SINGLE_QUOTE: '\'', /* ' */ - CHAR_SPACE: ' ', /* */ - CHAR_TAB: '\t', /* \t */ - CHAR_UNDERSCORE: '_', /* _ */ - CHAR_VERTICAL_LINE: '|', /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ -}; diff --git a/node_modules/braces/lib/expand.js b/node_modules/braces/lib/expand.js deleted file mode 100644 index 35b2c41..0000000 --- a/node_modules/braces/lib/expand.js +++ /dev/null @@ -1,113 +0,0 @@ -'use strict'; - -const fill = require('fill-range'); -const stringify = require('./stringify'); -const utils = require('./utils'); - -const append = (queue = '', stash = '', enclose = false) => { - const result = []; - - queue = [].concat(queue); - stash = [].concat(stash); - - if (!stash.length) return queue; - if (!queue.length) { - return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; - } - - for (const item of queue) { - if (Array.isArray(item)) { - for (const value of item) { - result.push(append(value, stash, enclose)); - } - } else { - for (let ele of stash) { - if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; - result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele); - } - } - } - return utils.flatten(result); -}; - -const expand = (ast, options = {}) => { - const rangeLimit = options.rangeLimit === undefined ? 
1000 : options.rangeLimit; - - const walk = (node, parent = {}) => { - node.queue = []; - - let p = parent; - let q = parent.queue; - - while (p.type !== 'brace' && p.type !== 'root' && p.parent) { - p = p.parent; - q = p.queue; - } - - if (node.invalid || node.dollar) { - q.push(append(q.pop(), stringify(node, options))); - return; - } - - if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { - q.push(append(q.pop(), ['{}'])); - return; - } - - if (node.nodes && node.ranges > 0) { - const args = utils.reduce(node.nodes); - - if (utils.exceedsLimit(...args, options.step, rangeLimit)) { - throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); - } - - let range = fill(...args, options); - if (range.length === 0) { - range = stringify(node, options); - } - - q.push(append(q.pop(), range)); - node.nodes = []; - return; - } - - const enclose = utils.encloseBrace(node); - let queue = node.queue; - let block = node; - - while (block.type !== 'brace' && block.type !== 'root' && block.parent) { - block = block.parent; - queue = block.queue; - } - - for (let i = 0; i < node.nodes.length; i++) { - const child = node.nodes[i]; - - if (child.type === 'comma' && node.type === 'brace') { - if (i === 1) queue.push(''); - queue.push(''); - continue; - } - - if (child.type === 'close') { - q.push(append(q.pop(), queue, enclose)); - continue; - } - - if (child.value && child.type !== 'open') { - queue.push(append(queue.pop(), child.value)); - continue; - } - - if (child.nodes) { - walk(child, node); - } - } - - return queue; - }; - - return utils.flatten(walk(ast)); -}; - -module.exports = expand; diff --git a/node_modules/braces/lib/parse.js b/node_modules/braces/lib/parse.js deleted file mode 100644 index 3a6988e..0000000 --- a/node_modules/braces/lib/parse.js +++ /dev/null @@ -1,331 +0,0 @@ -'use strict'; - -const stringify = require('./stringify'); - -/** - * Constants - */ - -const { - MAX_LENGTH, - CHAR_BACKSLASH, /* \ */ - CHAR_BACKTICK, /* ` */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_RIGHT_SQUARE_BRACKET, /* ] */ - CHAR_DOUBLE_QUOTE, /* " */ - CHAR_SINGLE_QUOTE, /* ' */ - CHAR_NO_BREAK_SPACE, - CHAR_ZERO_WIDTH_NOBREAK_SPACE -} = require('./constants'); - -/** - * parse - */ - -const parse = (input, options = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } - - const opts = options || {}; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - if (input.length > max) { - throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); - } - - const ast = { type: 'root', input, nodes: [] }; - const stack = [ast]; - let block = ast; - let prev = ast; - let brackets = 0; - const length = input.length; - let index = 0; - let depth = 0; - let value; - - /** - * Helpers - */ - - const advance = () => input[index++]; - const push = node => { - if (node.type === 'text' && prev.type === 'dot') { - prev.type = 'text'; - } - - if (prev && prev.type === 'text' && node.type === 'text') { - prev.value += node.value; - return; - } - - block.nodes.push(node); - node.parent = block; - node.prev = prev; - prev = node; - return node; - }; - - push({ type: 'bos' }); - - while (index < length) { - block = stack[stack.length - 1]; - value = advance(); - - /** - * Invalid chars - */ - - if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { - continue; - } - - /** - * Escaped chars - */ - - if (value === CHAR_BACKSLASH) { - push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); - continue; - } - - /** - * Right square bracket (literal): ']' - */ - - if (value === CHAR_RIGHT_SQUARE_BRACKET) { - push({ type: 'text', value: '\\' + value }); - continue; - } - - /** - * Left square bracket: '[' - */ - - if (value === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - - let next; - - while (index < length && (next = advance())) { - value += next; - - if (next === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - continue; - } - - if (next === CHAR_BACKSLASH) { - value += advance(); - continue; - } - - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - brackets--; - - if (brackets === 0) { - break; - } - } - } - - push({ type: 'text', value }); - continue; - } - - /** - * Parentheses - */ - - if (value === CHAR_LEFT_PARENTHESES) { - block = push({ type: 'paren', nodes: [] }); - stack.push(block); - push({ type: 'text', value }); - continue; - } - - if (value === CHAR_RIGHT_PARENTHESES) { - if (block.type !== 'paren') { - push({ type: 'text', value }); - continue; - } - block = stack.pop(); - push({ type: 'text', value }); - block = stack[stack.length - 1]; - continue; - } - - /** - * Quotes: '|"|` - */ - - if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { - const open = value; - let next; - - if (options.keepQuotes !== true) { - value = ''; - } - - while (index < length && (next = advance())) { - if (next === CHAR_BACKSLASH) { - value += next + advance(); - continue; - } - - if (next === open) { - if (options.keepQuotes === true) value += next; - break; - } - - value += next; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Left curly brace: '{' - */ - - if (value === CHAR_LEFT_CURLY_BRACE) { - depth++; - - const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; - const brace = { - type: 'brace', - open: true, - close: false, - dollar, - depth, - commas: 0, - ranges: 0, - nodes: [] - }; - - block = push(brace); - stack.push(block); - push({ type: 'open', value }); - continue; - } - - /** - * Right curly brace: '}' - */ - - if (value === CHAR_RIGHT_CURLY_BRACE) { - if (block.type !== 'brace') { - push({ type: 'text', value }); - continue; - } - - const type = 'close'; - block = stack.pop(); - block.close = true; - - push({ type, value }); - depth--; - - block = stack[stack.length - 1]; - continue; - } - - /** - * Comma: ',' - */ - - if (value === CHAR_COMMA && depth 
> 0) { - if (block.ranges > 0) { - block.ranges = 0; - const open = block.nodes.shift(); - block.nodes = [open, { type: 'text', value: stringify(block) }]; - } - - push({ type: 'comma', value }); - block.commas++; - continue; - } - - /** - * Dot: '.' - */ - - if (value === CHAR_DOT && depth > 0 && block.commas === 0) { - const siblings = block.nodes; - - if (depth === 0 || siblings.length === 0) { - push({ type: 'text', value }); - continue; - } - - if (prev.type === 'dot') { - block.range = []; - prev.value += value; - prev.type = 'range'; - - if (block.nodes.length !== 3 && block.nodes.length !== 5) { - block.invalid = true; - block.ranges = 0; - prev.type = 'text'; - continue; - } - - block.ranges++; - block.args = []; - continue; - } - - if (prev.type === 'range') { - siblings.pop(); - - const before = siblings[siblings.length - 1]; - before.value += prev.value + value; - prev = before; - block.ranges--; - continue; - } - - push({ type: 'dot', value }); - continue; - } - - /** - * Text - */ - - push({ type: 'text', value }); - } - - // Mark imbalanced braces and brackets as invalid - do { - block = stack.pop(); - - if (block.type !== 'root') { - block.nodes.forEach(node => { - if (!node.nodes) { - if (node.type === 'open') node.isOpen = true; - if (node.type === 'close') node.isClose = true; - if (!node.nodes) node.type = 'text'; - node.invalid = true; - } - }); - - // get the location of the block on parent.nodes (block's siblings) - const parent = stack[stack.length - 1]; - const index = parent.nodes.indexOf(block); - // replace the (invalid) block with it's nodes - parent.nodes.splice(index, 1, ...block.nodes); - } - } while (stack.length > 0); - - push({ type: 'eos' }); - return ast; -}; - -module.exports = parse; diff --git a/node_modules/braces/lib/stringify.js b/node_modules/braces/lib/stringify.js deleted file mode 100644 index 8bcf872..0000000 --- a/node_modules/braces/lib/stringify.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; - -const utils = require('./utils'); - -module.exports = (ast, options = {}) => { - const stringify = (node, parent = {}) => { - const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); - const invalidNode = node.invalid === true && options.escapeInvalid === true; - let output = ''; - - if (node.value) { - if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { - return '\\' + node.value; - } - return node.value; - } - - if (node.value) { - return node.value; - } - - if (node.nodes) { - for (const child of node.nodes) { - output += stringify(child); - } - } - return output; - }; - - return stringify(ast); -}; - diff --git a/node_modules/braces/lib/utils.js b/node_modules/braces/lib/utils.js deleted file mode 100644 index d19311f..0000000 --- a/node_modules/braces/lib/utils.js +++ /dev/null @@ -1,122 +0,0 @@ -'use strict'; - -exports.isInteger = num => { - if (typeof num === 'number') { - return Number.isInteger(num); - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isInteger(Number(num)); - } - return false; -}; - -/** - * Find a node of the given type - */ - -exports.find = (node, type) => node.nodes.find(node => node.type === type); - -/** - * Find a node of the given type - */ - -exports.exceedsLimit = (min, max, step = 1, limit) => { - if (limit === false) return false; - if (!exports.isInteger(min) || !exports.isInteger(max)) return false; - return ((Number(max) - Number(min)) / Number(step)) >= limit; -}; - -/** - * Escape the given node with '\\' before node.value - */ - -exports.escapeNode 
= (block, n = 0, type) => { - const node = block.nodes[n]; - if (!node) return; - - if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { - if (node.escaped !== true) { - node.value = '\\' + node.value; - node.escaped = true; - } - } -}; - -/** - * Returns true if the given brace node should be enclosed in literal braces - */ - -exports.encloseBrace = node => { - if (node.type !== 'brace') return false; - if ((node.commas >> 0 + node.ranges >> 0) === 0) { - node.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a brace node is invalid. - */ - -exports.isInvalidBrace = block => { - if (block.type !== 'brace') return false; - if (block.invalid === true || block.dollar) return true; - if ((block.commas >> 0 + block.ranges >> 0) === 0) { - block.invalid = true; - return true; - } - if (block.open !== true || block.close !== true) { - block.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a node is an open or close node - */ - -exports.isOpenOrClose = node => { - if (node.type === 'open' || node.type === 'close') { - return true; - } - return node.open === true || node.close === true; -}; - -/** - * Reduce an array of text nodes. - */ - -exports.reduce = nodes => nodes.reduce((acc, node) => { - if (node.type === 'text') acc.push(node.value); - if (node.type === 'range') node.type = 'text'; - return acc; -}, []); - -/** - * Flatten an array - */ - -exports.flatten = (...args) => { - const result = []; - - const flat = arr => { - for (let i = 0; i < arr.length; i++) { - const ele = arr[i]; - - if (Array.isArray(ele)) { - flat(ele); - continue; - } - - if (ele !== undefined) { - result.push(ele); - } - } - return result; - }; - - flat(args); - return result; -}; diff --git a/node_modules/braces/package.json b/node_modules/braces/package.json deleted file mode 100644 index c3c056e..0000000 --- a/node_modules/braces/package.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "name": "braces", - "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", - "version": "3.0.3", - "homepage": "https://github.com/micromatch/braces", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Brian Woodward (https://twitter.com/doowb)", - "Elan Shanker (https://github.com/es128)", - "Eugene Sharygin (https://github.com/eush77)", - "hemanth.hm (http://h3manth.com)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)" - ], - "repository": "micromatch/braces", - "bugs": { - "url": "https://github.com/micromatch/braces/issues" - }, - "license": "MIT", - "files": [ - "index.js", - "lib" - ], - "main": "index.js", - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "mocha", - "benchmark": "node benchmark" - }, - "dependencies": { - "fill-range": "^7.1.1" - }, - "devDependencies": { - "ansi-colors": "^3.2.4", - "bash-path": "^2.0.1", - "gulp-format-md": "^2.0.0", - "mocha": "^6.1.1" - }, - "keywords": [ - "alpha", - "alphabetical", - "bash", - "brace", - "braces", - "expand", - "expansion", - "filepath", - "fill", - "fs", - "glob", - "globbing", - "letter", - "match", - "matches", - "matching", - "number", - "numerical", - "path", - "range", - "ranges", - "sh" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "lint": { - "reflinks": true - }, - "plugins": [ - "gulp-format-md" - ] - } -} diff --git a/node_modules/chokidar/LICENSE b/node_modules/chokidar/LICENSE deleted file mode 100644 index fa9162b..0000000 --- a/node_modules/chokidar/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2012-2019 Paul Miller (https://paulmillr.com), Elan Shanker - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the “Software”), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/chokidar/README.md b/node_modules/chokidar/README.md deleted file mode 100644 index 8e25dec..0000000 --- a/node_modules/chokidar/README.md +++ /dev/null @@ -1,308 +0,0 @@ -# Chokidar [![Weekly downloads](https://img.shields.io/npm/dw/chokidar.svg)](https://github.com/paulmillr/chokidar) [![Yearly downloads](https://img.shields.io/npm/dy/chokidar.svg)](https://github.com/paulmillr/chokidar) - -> Minimal and efficient cross-platform file watching library - -[![NPM](https://nodei.co/npm/chokidar.png)](https://www.npmjs.com/package/chokidar) - -## Why? - -Node.js `fs.watch`: - -* Doesn't report filenames on MacOS. -* Doesn't report events at all when using editors like Sublime on MacOS. -* Often reports events twice. 
-* Emits most changes as `rename`. -* Does not provide an easy way to recursively watch file trees. -* Does not support recursive watching on Linux. - -Node.js `fs.watchFile`: - -* Almost as bad at event handling. -* Also does not provide any recursive watching. -* Results in high CPU utilization. - -Chokidar resolves these problems. - -Initially made for **[Brunch](https://brunch.io/)** (an ultra-swift web app build tool), it is now used in -[Microsoft's Visual Studio Code](https://github.com/microsoft/vscode), -[gulp](https://github.com/gulpjs/gulp/), -[karma](https://karma-runner.github.io/), -[PM2](https://github.com/Unitech/PM2), -[browserify](http://browserify.org/), -[webpack](https://webpack.github.io/), -[BrowserSync](https://www.browsersync.io/), -and [many others](https://www.npmjs.com/browse/depended/chokidar). -It has proven itself in production environments. - -Version 3 is out! Check out our blog post about it: [Chokidar 3: How to save 32TB of traffic every week](https://paulmillr.com/posts/chokidar-3-save-32tb-of-traffic/) - -## How? - -Chokidar does still rely on the Node.js core `fs` module, but when using -`fs.watch` and `fs.watchFile` for watching, it normalizes the events it -receives, often checking for truth by getting file stats and/or dir contents. - -On MacOS, chokidar by default uses a native extension exposing the Darwin -`FSEvents` API. This provides very efficient recursive watching compared with -implementations like `kqueue` available on most \*nix platforms. Chokidar still -does have to do some work to normalize the events received that way as well. - -On most other platforms, the `fs.watch`-based implementation is the default, which -avoids polling and keeps CPU usage down. Be advised that chokidar will initiate -watchers recursively for everything within scope of the paths that have been -specified, so be judicious about not wasting system resources by watching much -more than needed. - -## Getting started - -Install with npm: - -```sh -npm install chokidar -``` - -Then `require` and use it in your code: - -```javascript -const chokidar = require('chokidar'); - -// One-liner for current directory -chokidar.watch('.').on('all', (event, path) => { - console.log(event, path); -}); -``` - -## API - -```javascript -// Example of a more typical implementation structure - -// Initialize watcher. -const watcher = chokidar.watch('file, dir, glob, or array', { - ignored: /(^|[\/\\])\../, // ignore dotfiles - persistent: true -}); - -// Something to use when events are received. -const log = console.log.bind(console); -// Add event listeners. -watcher - .on('add', path => log(`File ${path} has been added`)) - .on('change', path => log(`File ${path} has been changed`)) - .on('unlink', path => log(`File ${path} has been removed`)); - -// More possible events. -watcher - .on('addDir', path => log(`Directory ${path} has been added`)) - .on('unlinkDir', path => log(`Directory ${path} has been removed`)) - .on('error', error => log(`Watcher error: ${error}`)) - .on('ready', () => log('Initial scan complete. Ready for changes')) - .on('raw', (event, path, details) => { // internal - log('Raw event info:', event, path, details); - }); - -// 'add', 'addDir' and 'change' events also receive stat() results as second -// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats -watcher.on('change', (path, stats) => { - if (stats) console.log(`File ${path} changed size to ${stats.size}`); -}); - -// Watch new files. 
-watcher.add('new-file'); -watcher.add(['new-file-2', 'new-file-3', '**/other-file*']); - -// Get list of actual paths being watched on the filesystem -var watchedPaths = watcher.getWatched(); - -// Un-watch some files. -await watcher.unwatch('new-file*'); - -// Stop watching. -// The method is async! -watcher.close().then(() => console.log('closed')); - -// Full list of options. See below for descriptions. -// Do not use this example! -chokidar.watch('file', { - persistent: true, - - ignored: '*.txt', - ignoreInitial: false, - followSymlinks: true, - cwd: '.', - disableGlobbing: false, - - usePolling: false, - interval: 100, - binaryInterval: 300, - alwaysStat: false, - depth: 99, - awaitWriteFinish: { - stabilityThreshold: 2000, - pollInterval: 100 - }, - - ignorePermissionErrors: false, - atomic: true // or a custom 'atomicity delay', in milliseconds (default 100) -}); - -``` - -`chokidar.watch(paths, [options])` - -* `paths` (string or array of strings). Paths to files, dirs to be watched -recursively, or glob patterns. - - Note: globs must not contain windows separators (`\`), - because that's how they work by the standard — - you'll need to replace them with forward slashes (`/`). - - Note 2: for additional glob documentation, check out low-level - library: [picomatch](https://github.com/micromatch/picomatch). -* `options` (object) Options object as defined below: - -#### Persistence - -* `persistent` (default: `true`). Indicates whether the process -should continue to run as long as files are being watched. If set to -`false` when using `fsevents` to watch, no more events will be emitted -after `ready`, even if the process continues to run. - -#### Path filtering - -* `ignored` ([anymatch](https://github.com/es128/anymatch)-compatible definition) -Defines files/paths to be ignored. The whole relative or absolute path is -tested, not just filename. If a function with two arguments is provided, it -gets called twice per path - once with a single argument (the path), second -time with two arguments (the path and the -[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) -object of that path). -* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while -instantiating the watching as chokidar discovers these file paths (before the `ready` event). -* `followSymlinks` (default: `true`). When `false`, only the -symlinks themselves will be watched for changes instead of following -the link references and bubbling events through the link's path. -* `cwd` (no default). The base directory from which watch `paths` are to be -derived. Paths emitted with events will be relative to this. -* `disableGlobbing` (default: `false`). If set to `true` then the strings passed to `.watch()` and `.add()` are treated as -literal path names, even if they look like globs. - -#### Performance - -* `usePolling` (default: `false`). -Whether to use fs.watchFile (backed by polling), or fs.watch. If polling -leads to high CPU utilization, consider setting this to `false`. It is -typically necessary to **set this to `true` to successfully watch files over -a network**, and it may be necessary to successfully watch files in other -non-standard situations. Setting to `true` explicitly on MacOS overrides the -`useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable -to true (1) or false (0) in order to override this option. -* _Polling-specific settings_ (effective when `usePolling: true`) - * `interval` (default: `100`). 
Interval of file system polling, in milliseconds. You may also - set the CHOKIDAR_INTERVAL env variable to override this option. - * `binaryInterval` (default: `300`). Interval of file system - polling for binary files. - ([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) -* `useFsEvents` (default: `true` on MacOS). Whether to use the -`fsevents` watching interface if available. When set to `true` explicitly -and `fsevents` is available this supercedes the `usePolling` setting. When -set to `false` on MacOS, `usePolling: true` becomes the default. -* `alwaysStat` (default: `false`). If relying upon the -[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) -object that may get passed with `add`, `addDir`, and `change` events, set -this to `true` to ensure it is provided even in cases where it wasn't -already available from the underlying watch events. -* `depth` (default: `undefined`). If set, limits how many levels of -subdirectories will be traversed. -* `awaitWriteFinish` (default: `false`). -By default, the `add` event will fire when a file first appears on disk, before -the entire file has been written. Furthermore, in some cases some `change` -events will be emitted while the file is being written. In some cases, -especially when watching for large files there will be a need to wait for the -write operation to finish before responding to a file creation or modification. -Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size, -holding its `add` and `change` events until the size does not change for a -configurable amount of time. The appropriate duration setting is heavily -dependent on the OS and hardware. For accurate detection this parameter should -be relatively high, making file watching much less responsive. -Use with caution. - * *`options.awaitWriteFinish` can be set to an object in order to adjust - timing params:* - * `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in - milliseconds for a file size to remain constant before emitting its event. - * `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds. - -#### Errors - -* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files -that don't have read permissions if possible. If watching fails due to `EPERM` -or `EACCES` with this set to `true`, the errors will be suppressed silently. -* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`). -Automatically filters out artifacts that occur when using editors that use -"atomic writes" instead of writing directly to the source file. If a file is -re-added within 100 ms of being deleted, Chokidar emits a `change` event -rather than `unlink` then `add`. If the default of 100 ms does not work well -for you, you can override it by setting `atomic` to a custom value, in -milliseconds. - -### Methods & Events - -`chokidar.watch()` produces an instance of `FSWatcher`. Methods of `FSWatcher`: - -* `.add(path / paths)`: Add files, directories, or glob patterns for tracking. -Takes an array of strings or just one string. -* `.on(event, callback)`: Listen for an FS event. -Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`, -`raw`, `error`. -Additionally `all` is available which gets emitted with the underlying event -name and path for every event other than `ready`, `raw`, and `error`. `raw` is internal, use it carefully. 
-* `.unwatch(path / paths)`: Stop watching files, directories, or glob patterns. -Takes an array of strings or just one string. -* `.close()`: **async** Removes all listeners from watched files. Asynchronous, returns Promise. Use with `await` to ensure bugs don't happen. -* `.getWatched()`: Returns an object representing all the paths on the file -system being watched by this `FSWatcher` instance. The object's keys are all the -directories (using absolute paths unless the `cwd` option was used), and the -values are arrays of the names of the items contained in each directory. - -## CLI - -If you need a CLI interface for your file watching, check out -[chokidar-cli](https://github.com/open-cli-tools/chokidar-cli), allowing you to -execute a command on each change, or get a stdio stream of change events. - -## Install Troubleshooting - -* `npm WARN optional dep failed, continuing fsevents@n.n.n` - * This message is normal part of how `npm` handles optional dependencies and is - not indicative of a problem. Even if accompanied by other related error messages, - Chokidar should function properly. - -* `TypeError: fsevents is not a constructor` - * Update chokidar by doing `rm -rf node_modules package-lock.json yarn.lock && npm install`, or update your dependency that uses chokidar. - -* Chokidar is producing `ENOSP` error on Linux, like this: - * `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell` - `Error: watch /home/ ENOSPC` - * This means Chokidar ran out of file handles and you'll need to increase their count by executing the following command in Terminal: - `echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p` - -## Changelog - -For more detailed changelog, see [`full_changelog.md`](.github/full_changelog.md). -- **v3.5 (Jan 6, 2021):** Support for ARM Macs with Apple Silicon. Fixes for deleted symlinks. -- **v3.4 (Apr 26, 2020):** Support for directory-based symlinks. Fixes for macos file replacement. -- **v3.3 (Nov 2, 2019):** `FSWatcher#close()` method became async. That fixes IO race conditions related to close method. -- **v3.2 (Oct 1, 2019):** Improve Linux RAM usage by 50%. Race condition fixes. Windows glob fixes. Improve stability by using tight range of dependency versions. -- **v3.1 (Sep 16, 2019):** dotfiles are no longer filtered out by default. Use `ignored` option if needed. Improve initial Linux scan time by 50%. -- **v3 (Apr 30, 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16 and higher. -- **v2 (Dec 29, 2017):** Globs are now posix-style-only; without windows support. Tons of bugfixes. -- **v1 (Apr 7, 2015):** Glob support, symlink support, tons of bugfixes. Node 0.8+ is supported -- **v0.1 (Apr 20, 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66) - -## Also - -Why was chokidar named this way? What's the meaning behind it? - ->Chowkidar is a transliteration of a Hindi word meaning 'watchman, gatekeeper', चौकीदार. This ultimately comes from Sanskrit _ चतुष्क_ (crossway, quadrangle, consisting-of-four). This word is also used in other languages like Urdu as (چوکیدار) which is widely used in Pakistan and India. - -## License - -MIT (c) Paul Miller (), see [LICENSE](LICENSE) file. 
diff --git a/node_modules/chokidar/index.js b/node_modules/chokidar/index.js deleted file mode 100644 index 8752893..0000000 --- a/node_modules/chokidar/index.js +++ /dev/null @@ -1,973 +0,0 @@ -'use strict'; - -const { EventEmitter } = require('events'); -const fs = require('fs'); -const sysPath = require('path'); -const { promisify } = require('util'); -const readdirp = require('readdirp'); -const anymatch = require('anymatch').default; -const globParent = require('glob-parent'); -const isGlob = require('is-glob'); -const braces = require('braces'); -const normalizePath = require('normalize-path'); - -const NodeFsHandler = require('./lib/nodefs-handler'); -const FsEventsHandler = require('./lib/fsevents-handler'); -const { - EV_ALL, - EV_READY, - EV_ADD, - EV_CHANGE, - EV_UNLINK, - EV_ADD_DIR, - EV_UNLINK_DIR, - EV_RAW, - EV_ERROR, - - STR_CLOSE, - STR_END, - - BACK_SLASH_RE, - DOUBLE_SLASH_RE, - SLASH_OR_BACK_SLASH_RE, - DOT_RE, - REPLACER_RE, - - SLASH, - SLASH_SLASH, - BRACE_START, - BANG, - ONE_DOT, - TWO_DOTS, - GLOBSTAR, - SLASH_GLOBSTAR, - ANYMATCH_OPTS, - STRING_TYPE, - FUNCTION_TYPE, - EMPTY_STR, - EMPTY_FN, - - isWindows, - isMacos, - isIBMi -} = require('./lib/constants'); - -const stat = promisify(fs.stat); -const readdir = promisify(fs.readdir); - -/** - * @typedef {String} Path - * @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName - * @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType - */ - -/** - * - * @typedef {Object} WatchHelpers - * @property {Boolean} followSymlinks - * @property {'stat'|'lstat'} statMethod - * @property {Path} path - * @property {Path} watchPath - * @property {Function} entryPath - * @property {Boolean} hasGlob - * @property {Object} globFilter - * @property {Function} filterPath - * @property {Function} filterDir - */ - -const arrify = (value = []) => Array.isArray(value) ? value : [value]; -const flatten = (list, result = []) => { - list.forEach(item => { - if (Array.isArray(item)) { - flatten(item, result); - } else { - result.push(item); - } - }); - return result; -}; - -const unifyPaths = (paths_) => { - /** - * @type {Array} - */ - const paths = flatten(arrify(paths_)); - if (!paths.every(p => typeof p === STRING_TYPE)) { - throw new TypeError(`Non-string provided as watch path: ${paths}`); - } - return paths.map(normalizePathToUnix); -}; - -// If SLASH_SLASH occurs at the beginning of path, it is not replaced -// because "//StoragePC/DrivePool/Movies" is a valid network path -const toUnix = (string) => { - let str = string.replace(BACK_SLASH_RE, SLASH); - let prepend = false; - if (str.startsWith(SLASH_SLASH)) { - prepend = true; - } - while (str.match(DOUBLE_SLASH_RE)) { - str = str.replace(DOUBLE_SLASH_RE, SLASH); - } - if (prepend) { - str = SLASH + str; - } - return str; -}; - -// Our version of upath.normalize -// TODO: this is not equal to path-normalize module - investigate why -const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path))); - -const normalizeIgnored = (cwd = EMPTY_STR) => (path) => { - if (typeof path !== STRING_TYPE) return path; - return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path)); -}; - -const getAbsolutePath = (path, cwd) => { - if (sysPath.isAbsolute(path)) { - return path; - } - if (path.startsWith(BANG)) { - return BANG + sysPath.join(cwd, path.slice(1)); - } - return sysPath.join(cwd, path); -}; - -const undef = (opts, key) => opts[key] === undefined; - -/** - * Directory entry. 
- * @property {Path} path - * @property {Set} items - */ -class DirEntry { - /** - * @param {Path} dir - * @param {Function} removeWatcher - */ - constructor(dir, removeWatcher) { - this.path = dir; - this._removeWatcher = removeWatcher; - /** @type {Set} */ - this.items = new Set(); - } - - add(item) { - const {items} = this; - if (!items) return; - if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item); - } - - async remove(item) { - const {items} = this; - if (!items) return; - items.delete(item); - if (items.size > 0) return; - - const dir = this.path; - try { - await readdir(dir); - } catch (err) { - if (this._removeWatcher) { - this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir)); - } - } - } - - has(item) { - const {items} = this; - if (!items) return; - return items.has(item); - } - - /** - * @returns {Array} - */ - getChildren() { - const {items} = this; - if (!items) return; - return [...items.values()]; - } - - dispose() { - this.items.clear(); - delete this.path; - delete this._removeWatcher; - delete this.items; - Object.freeze(this); - } -} - -const STAT_METHOD_F = 'stat'; -const STAT_METHOD_L = 'lstat'; -class WatchHelper { - constructor(path, watchPath, follow, fsw) { - this.fsw = fsw; - this.path = path = path.replace(REPLACER_RE, EMPTY_STR); - this.watchPath = watchPath; - this.fullWatchPath = sysPath.resolve(watchPath); - this.hasGlob = watchPath !== path; - /** @type {object|boolean} */ - if (path === EMPTY_STR) this.hasGlob = false; - this.globSymlink = this.hasGlob && follow ? undefined : false; - this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false; - this.dirParts = this.getDirParts(path); - this.dirParts.forEach((parts) => { - if (parts.length > 1) parts.pop(); - }); - this.followSymlinks = follow; - this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L; - } - - checkGlobSymlink(entry) { - // only need to resolve once - // first entry should always have entry.parentDir === EMPTY_STR - if (this.globSymlink === undefined) { - this.globSymlink = entry.fullParentDir === this.fullWatchPath ? - false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath}; - } - - if (this.globSymlink) { - return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath); - } - - return entry.fullPath; - } - - entryPath(entry) { - return sysPath.join(this.watchPath, - sysPath.relative(this.watchPath, this.checkGlobSymlink(entry)) - ); - } - - filterPath(entry) { - const {stats} = entry; - if (stats && stats.isSymbolicLink()) return this.filterDir(entry); - const resolvedPath = this.entryPath(entry); - const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ? - this.globFilter(resolvedPath) : true; - return matchesGlob && - this.fsw._isntIgnored(resolvedPath, stats) && - this.fsw._hasReadPermissions(stats); - } - - getDirParts(path) { - if (!this.hasGlob) return []; - const parts = []; - const expandedPath = path.includes(BRACE_START) ? 
braces.expand(path) : [path]; - expandedPath.forEach((path) => { - parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE)); - }); - return parts; - } - - filterDir(entry) { - if (this.hasGlob) { - const entryParts = this.getDirParts(this.checkGlobSymlink(entry)); - let globstar = false; - this.unmatchedGlob = !this.dirParts.some((parts) => { - return parts.every((part, i) => { - if (part === GLOBSTAR) globstar = true; - return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS); - }); - }); - } - return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats); - } -} - -/** - * Watches files & directories for changes. Emitted events: - * `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error` - * - * new FSWatcher() - * .add(directories) - * .on('add', path => log('File', path, 'was added')) - */ -class FSWatcher extends EventEmitter { -// Not indenting methods for history sake; for now. -constructor(_opts) { - super(); - - const opts = {}; - if (_opts) Object.assign(opts, _opts); // for frozen objects - - /** @type {Map} */ - this._watched = new Map(); - /** @type {Map} */ - this._closers = new Map(); - /** @type {Set} */ - this._ignoredPaths = new Set(); - - /** @type {Map} */ - this._throttled = new Map(); - - /** @type {Map} */ - this._symlinkPaths = new Map(); - - this._streams = new Set(); - this.closed = false; - - // Set up default options. - if (undef(opts, 'persistent')) opts.persistent = true; - if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false; - if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false; - if (undef(opts, 'interval')) opts.interval = 100; - if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300; - if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false; - opts.enableBinaryInterval = opts.binaryInterval !== opts.interval; - - // Enable fsevents on OS X when polling isn't explicitly enabled. - if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling; - - // If we can't use fsevents, ensure the options reflect it's disabled. - const canUseFsEvents = FsEventsHandler.canUse(); - if (!canUseFsEvents) opts.useFsEvents = false; - - // Use polling on Mac if not using fsevents. - // Other platforms use non-polling fs_watch. - if (undef(opts, 'usePolling') && !opts.useFsEvents) { - opts.usePolling = isMacos; - } - - // Always default to polling on IBM i because fs.watch() is not available on IBM i. 
- if(isIBMi) { - opts.usePolling = true; - } - - // Global override (useful for end-developers that need to force polling for all - // instances of chokidar, regardless of usage/dependency depth) - const envPoll = process.env.CHOKIDAR_USEPOLLING; - if (envPoll !== undefined) { - const envLower = envPoll.toLowerCase(); - - if (envLower === 'false' || envLower === '0') { - opts.usePolling = false; - } else if (envLower === 'true' || envLower === '1') { - opts.usePolling = true; - } else { - opts.usePolling = !!envLower; - } - } - const envInterval = process.env.CHOKIDAR_INTERVAL; - if (envInterval) { - opts.interval = Number.parseInt(envInterval, 10); - } - - // Editor atomic write normalization enabled by default with fs.watch - if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents; - if (opts.atomic) this._pendingUnlinks = new Map(); - - if (undef(opts, 'followSymlinks')) opts.followSymlinks = true; - - if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false; - if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {}; - const awf = opts.awaitWriteFinish; - if (awf) { - if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000; - if (!awf.pollInterval) awf.pollInterval = 100; - this._pendingWrites = new Map(); - } - if (opts.ignored) opts.ignored = arrify(opts.ignored); - - let readyCalls = 0; - this._emitReady = () => { - readyCalls++; - if (readyCalls >= this._readyCount) { - this._emitReady = EMPTY_FN; - this._readyEmitted = true; - // use process.nextTick to allow time for listener to be bound - process.nextTick(() => this.emit(EV_READY)); - } - }; - this._emitRaw = (...args) => this.emit(EV_RAW, ...args); - this._readyEmitted = false; - this.options = opts; - - // Initialize with proper watcher. - if (opts.useFsEvents) { - this._fsEventsHandler = new FsEventsHandler(this); - } else { - this._nodeFsHandler = new NodeFsHandler(this); - } - - // You’re frozen when your heart’s not open. 
- Object.freeze(opts); -} - -// Public methods - -/** - * Adds paths to be watched on an existing FSWatcher instance - * @param {Path|Array} paths_ - * @param {String=} _origAdd private; for handling non-existent paths to be watched - * @param {Boolean=} _internal private; indicates a non-user add - * @returns {FSWatcher} for chaining - */ -add(paths_, _origAdd, _internal) { - const {cwd, disableGlobbing} = this.options; - this.closed = false; - let paths = unifyPaths(paths_); - if (cwd) { - paths = paths.map((path) => { - const absPath = getAbsolutePath(path, cwd); - - // Check `path` instead of `absPath` because the cwd portion can't be a glob - if (disableGlobbing || !isGlob(path)) { - return absPath; - } - return normalizePath(absPath); - }); - } - - // set aside negated glob strings - paths = paths.filter((path) => { - if (path.startsWith(BANG)) { - this._ignoredPaths.add(path.slice(1)); - return false; - } - - // if a path is being added that was previously ignored, stop ignoring it - this._ignoredPaths.delete(path); - this._ignoredPaths.delete(path + SLASH_GLOBSTAR); - - // reset the cached userIgnored anymatch fn - // to make ignoredPaths changes effective - this._userIgnored = undefined; - - return true; - }); - - if (this.options.useFsEvents && this._fsEventsHandler) { - if (!this._readyCount) this._readyCount = paths.length; - if (this.options.persistent) this._readyCount += paths.length; - paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path)); - } else { - if (!this._readyCount) this._readyCount = 0; - this._readyCount += paths.length; - Promise.all( - paths.map(async path => { - const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd); - if (res) this._emitReady(); - return res; - }) - ).then(results => { - if (this.closed) return; - results.filter(item => item).forEach(item => { - this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item)); - }); - }); - } - - return this; -} - -/** - * Close watchers or start ignoring events from specified paths. - * @param {Path|Array} paths_ - string or array of strings, file/directory paths and/or globs - * @returns {FSWatcher} for chaining -*/ -unwatch(paths_) { - if (this.closed) return this; - const paths = unifyPaths(paths_); - const {cwd} = this.options; - - paths.forEach((path) => { - // convert to absolute path unless relative path already matches - if (!sysPath.isAbsolute(path) && !this._closers.has(path)) { - if (cwd) path = sysPath.join(cwd, path); - path = sysPath.resolve(path); - } - - this._closePath(path); - - this._ignoredPaths.add(path); - if (this._watched.has(path)) { - this._ignoredPaths.add(path + SLASH_GLOBSTAR); - } - - // reset the cached userIgnored anymatch fn - // to make ignoredPaths changes effective - this._userIgnored = undefined; - }); - - return this; -} - -/** - * Close watchers and remove all listeners from watched paths. - * @returns {Promise}. -*/ -close() { - if (this.closed) return this._closePromise; - this.closed = true; - - // Memory management. 
- this.removeAllListeners(); - const closers = []; - this._closers.forEach(closerList => closerList.forEach(closer => { - const promise = closer(); - if (promise instanceof Promise) closers.push(promise); - })); - this._streams.forEach(stream => stream.destroy()); - this._userIgnored = undefined; - this._readyCount = 0; - this._readyEmitted = false; - this._watched.forEach(dirent => dirent.dispose()); - ['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => { - this[`_${key}`].clear(); - }); - - this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve(); - return this._closePromise; -} - -/** - * Expose list of watched paths - * @returns {Object} for chaining -*/ -getWatched() { - const watchList = {}; - this._watched.forEach((entry, dir) => { - const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir; - watchList[key || ONE_DOT] = entry.getChildren().sort(); - }); - return watchList; -} - -emitWithAll(event, args) { - this.emit(...args); - if (event !== EV_ERROR) this.emit(EV_ALL, ...args); -} - -// Common helpers -// -------------- - -/** - * Normalize and emit events. - * Calling _emit DOES NOT MEAN emit() would be called! - * @param {EventName} event Type of event - * @param {Path} path File or directory path - * @param {*=} val1 arguments to be passed with event - * @param {*=} val2 - * @param {*=} val3 - * @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag - */ -async _emit(event, path, val1, val2, val3) { - if (this.closed) return; - - const opts = this.options; - if (isWindows) path = sysPath.normalize(path); - if (opts.cwd) path = sysPath.relative(opts.cwd, path); - /** @type Array */ - const args = [event, path]; - if (val3 !== undefined) args.push(val1, val2, val3); - else if (val2 !== undefined) args.push(val1, val2); - else if (val1 !== undefined) args.push(val1); - - const awf = opts.awaitWriteFinish; - let pw; - if (awf && (pw = this._pendingWrites.get(path))) { - pw.lastChange = new Date(); - return this; - } - - if (opts.atomic) { - if (event === EV_UNLINK) { - this._pendingUnlinks.set(path, args); - setTimeout(() => { - this._pendingUnlinks.forEach((entry, path) => { - this.emit(...entry); - this.emit(EV_ALL, ...entry); - this._pendingUnlinks.delete(path); - }); - }, typeof opts.atomic === 'number' ? opts.atomic : 100); - return this; - } - if (event === EV_ADD && this._pendingUnlinks.has(path)) { - event = args[0] = EV_CHANGE; - this._pendingUnlinks.delete(path); - } - } - - if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) { - const awfEmit = (err, stats) => { - if (err) { - event = args[0] = EV_ERROR; - args[1] = err; - this.emitWithAll(event, args); - } else if (stats) { - // if stats doesn't exist the file must have been deleted - if (args.length > 2) { - args[2] = stats; - } else { - args.push(stats); - } - this.emitWithAll(event, args); - } - }; - - this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit); - return this; - } - - if (event === EV_CHANGE) { - const isThrottled = !this._throttle(EV_CHANGE, path, 50); - if (isThrottled) return this; - } - - if (opts.alwaysStat && val1 === undefined && - (event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE) - ) { - const fullPath = opts.cwd ? 
sysPath.join(opts.cwd, path) : path; - let stats; - try { - stats = await stat(fullPath); - } catch (err) {} - // Suppress event when fs_stat fails, to avoid sending undefined 'stat' - if (!stats || this.closed) return; - args.push(stats); - } - this.emitWithAll(event, args); - - return this; -} - -/** - * Common handler for errors - * @param {Error} error - * @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag - */ -_handleError(error) { - const code = error && error.code; - if (error && code !== 'ENOENT' && code !== 'ENOTDIR' && - (!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES')) - ) { - this.emit(EV_ERROR, error); - } - return error || this.closed; -} - -/** - * Helper utility for throttling - * @param {ThrottleType} actionType type being throttled - * @param {Path} path being acted upon - * @param {Number} timeout duration of time to suppress duplicate actions - * @returns {Object|false} tracking object or false if action should be suppressed - */ -_throttle(actionType, path, timeout) { - if (!this._throttled.has(actionType)) { - this._throttled.set(actionType, new Map()); - } - - /** @type {Map} */ - const action = this._throttled.get(actionType); - /** @type {Object} */ - const actionPath = action.get(path); - - if (actionPath) { - actionPath.count++; - return false; - } - - let timeoutObject; - const clear = () => { - const item = action.get(path); - const count = item ? item.count : 0; - action.delete(path); - clearTimeout(timeoutObject); - if (item) clearTimeout(item.timeoutObject); - return count; - }; - timeoutObject = setTimeout(clear, timeout); - const thr = {timeoutObject, clear, count: 0}; - action.set(path, thr); - return thr; -} - -_incrReadyCount() { - return this._readyCount++; -} - -/** - * Awaits write operation to finish. - * Polls a newly created file for size variations. When files size does not change for 'threshold' milliseconds calls callback. - * @param {Path} path being acted upon - * @param {Number} threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished - * @param {EventName} event - * @param {Function} awfEmit Callback to be called when ready for event to be emitted. 
- */ -_awaitWriteFinish(path, threshold, event, awfEmit) { - let timeoutHandler; - - let fullPath = path; - if (this.options.cwd && !sysPath.isAbsolute(path)) { - fullPath = sysPath.join(this.options.cwd, path); - } - - const now = new Date(); - - const awaitWriteFinish = (prevStat) => { - fs.stat(fullPath, (err, curStat) => { - if (err || !this._pendingWrites.has(path)) { - if (err && err.code !== 'ENOENT') awfEmit(err); - return; - } - - const now = Number(new Date()); - - if (prevStat && curStat.size !== prevStat.size) { - this._pendingWrites.get(path).lastChange = now; - } - const pw = this._pendingWrites.get(path); - const df = now - pw.lastChange; - - if (df >= threshold) { - this._pendingWrites.delete(path); - awfEmit(undefined, curStat); - } else { - timeoutHandler = setTimeout( - awaitWriteFinish, - this.options.awaitWriteFinish.pollInterval, - curStat - ); - } - }); - }; - - if (!this._pendingWrites.has(path)) { - this._pendingWrites.set(path, { - lastChange: now, - cancelWait: () => { - this._pendingWrites.delete(path); - clearTimeout(timeoutHandler); - return event; - } - }); - timeoutHandler = setTimeout( - awaitWriteFinish, - this.options.awaitWriteFinish.pollInterval - ); - } -} - -_getGlobIgnored() { - return [...this._ignoredPaths.values()]; -} - -/** - * Determines whether user has asked to ignore this path. - * @param {Path} path filepath or dir - * @param {fs.Stats=} stats result of fs.stat - * @returns {Boolean} - */ -_isIgnored(path, stats) { - if (this.options.atomic && DOT_RE.test(path)) return true; - if (!this._userIgnored) { - const {cwd} = this.options; - const ign = this.options.ignored; - - const ignored = ign && ign.map(normalizeIgnored(cwd)); - const paths = arrify(ignored) - .filter((path) => typeof path === STRING_TYPE && !isGlob(path)) - .map((path) => path + SLASH_GLOBSTAR); - const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths); - this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS); - } - - return this._userIgnored([path, stats]); -} - -_isntIgnored(path, stat) { - return !this._isIgnored(path, stat); -} - -/** - * Provides a set of common helpers and properties relating to symlink and glob handling. - * @param {Path} path file, directory, or glob pattern being watched - * @param {Number=} depth at any depth > 0, this isn't a glob - * @returns {WatchHelper} object containing helpers for this path - */ -_getWatchHelpers(path, depth) { - const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path); - const follow = this.options.followSymlinks; - - return new WatchHelper(path, watchPath, follow, this); -} - -// Directory helpers -// ----------------- - -/** - * Provides directory tracking objects - * @param {String} directory path of the directory - * @returns {DirEntry} the directory's tracking object - */ -_getWatchedDir(directory) { - if (!this._boundRemove) this._boundRemove = this._remove.bind(this); - const dir = sysPath.resolve(directory); - if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove)); - return this._watched.get(dir); -} - -// File helpers -// ------------ - -/** - * Check for read permissions. 
- * Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405 - * @param {fs.Stats} stats - object, result of fs_stat - * @returns {Boolean} indicates whether the file can be read -*/ -_hasReadPermissions(stats) { - if (this.options.ignorePermissionErrors) return true; - - // stats.mode may be bigint - const md = stats && Number.parseInt(stats.mode, 10); - const st = md & 0o777; - const it = Number.parseInt(st.toString(8)[0], 10); - return Boolean(4 & it); -} - -/** - * Handles emitting unlink events for - * files and directories, and via recursion, for - * files and directories within directories that are unlinked - * @param {String} directory within which the following item is located - * @param {String} item base path of item/directory - * @returns {void} -*/ -_remove(directory, item, isDirectory) { - // if what is being deleted is a directory, get that directory's paths - // for recursive deleting and cleaning of watched object - // if it is not a directory, nestedDirectoryChildren will be empty array - const path = sysPath.join(directory, item); - const fullPath = sysPath.resolve(path); - isDirectory = isDirectory != null - ? isDirectory - : this._watched.has(path) || this._watched.has(fullPath); - - // prevent duplicate handling in case of arriving here nearly simultaneously - // via multiple paths (such as _handleFile and _handleDir) - if (!this._throttle('remove', path, 100)) return; - - // if the only watched file is removed, watch for its return - if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) { - this.add(directory, item, true); - } - - // This will create a new entry in the watched object in either case - // so we got to do the directory check beforehand - const wp = this._getWatchedDir(path); - const nestedDirectoryChildren = wp.getChildren(); - - // Recursively remove children directories / files. - nestedDirectoryChildren.forEach(nested => this._remove(path, nested)); - - // Check if item was on the watched list and remove it - const parent = this._getWatchedDir(directory); - const wasTracked = parent.has(item); - parent.remove(item); - - // Fixes issue #1042 -> Relative paths were detected and added as symlinks - // (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612), - // but never removed from the map in case the path was deleted. - // This leads to an incorrect state if the path was recreated: - // https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553 - if (this._symlinkPaths.has(fullPath)) { - this._symlinkPaths.delete(fullPath); - } - - // If we wait for this file to be fully written, cancel the wait. - let relPath = path; - if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path); - if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) { - const event = this._pendingWrites.get(relPath).cancelWait(); - if (event === EV_ADD) return; - } - - // The Entry will either be a directory that just got removed - // or a bogus entry to a file, in either case we have to remove it - this._watched.delete(path); - this._watched.delete(fullPath); - const eventName = isDirectory ? 
EV_UNLINK_DIR : EV_UNLINK; - if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path); - - // Avoid conflicts if we later create another file with the same name - if (!this.options.useFsEvents) { - this._closePath(path); - } -} - -/** - * Closes all watchers for a path - * @param {Path} path - */ -_closePath(path) { - this._closeFile(path) - const dir = sysPath.dirname(path); - this._getWatchedDir(dir).remove(sysPath.basename(path)); -} - -/** - * Closes only file-specific watchers - * @param {Path} path - */ -_closeFile(path) { - const closers = this._closers.get(path); - if (!closers) return; - closers.forEach(closer => closer()); - this._closers.delete(path); -} - -/** - * - * @param {Path} path - * @param {Function} closer - */ -_addPathCloser(path, closer) { - if (!closer) return; - let list = this._closers.get(path); - if (!list) { - list = []; - this._closers.set(path, list); - } - list.push(closer); -} - -_readdirp(root, opts) { - if (this.closed) return; - const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts}; - let stream = readdirp(root, options); - this._streams.add(stream); - stream.once(STR_CLOSE, () => { - stream = undefined; - }); - stream.once(STR_END, () => { - if (stream) { - this._streams.delete(stream); - stream = undefined; - } - }); - return stream; -} - -} - -// Export FSWatcher class -exports.FSWatcher = FSWatcher; - -/** - * Instantiates watcher with paths to be tracked. - * @param {String|Array} paths file/directory paths and/or globs - * @param {Object=} options chokidar opts - * @returns an instance of FSWatcher for chaining. - */ -const watch = (paths, options) => { - const watcher = new FSWatcher(options); - watcher.add(paths); - return watcher; -}; - -exports.watch = watch; diff --git a/node_modules/chokidar/lib/constants.js b/node_modules/chokidar/lib/constants.js deleted file mode 100644 index 4743865..0000000 --- a/node_modules/chokidar/lib/constants.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict'; - -const {sep} = require('path'); -const {platform} = process; -const os = require('os'); - -exports.EV_ALL = 'all'; -exports.EV_READY = 'ready'; -exports.EV_ADD = 'add'; -exports.EV_CHANGE = 'change'; -exports.EV_ADD_DIR = 'addDir'; -exports.EV_UNLINK = 'unlink'; -exports.EV_UNLINK_DIR = 'unlinkDir'; -exports.EV_RAW = 'raw'; -exports.EV_ERROR = 'error'; - -exports.STR_DATA = 'data'; -exports.STR_END = 'end'; -exports.STR_CLOSE = 'close'; - -exports.FSEVENT_CREATED = 'created'; -exports.FSEVENT_MODIFIED = 'modified'; -exports.FSEVENT_DELETED = 'deleted'; -exports.FSEVENT_MOVED = 'moved'; -exports.FSEVENT_CLONED = 'cloned'; -exports.FSEVENT_UNKNOWN = 'unknown'; -exports.FSEVENT_FLAG_MUST_SCAN_SUBDIRS = 1; -exports.FSEVENT_TYPE_FILE = 'file'; -exports.FSEVENT_TYPE_DIRECTORY = 'directory'; -exports.FSEVENT_TYPE_SYMLINK = 'symlink'; - -exports.KEY_LISTENERS = 'listeners'; -exports.KEY_ERR = 'errHandlers'; -exports.KEY_RAW = 'rawEmitters'; -exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW]; - -exports.DOT_SLASH = `.${sep}`; - -exports.BACK_SLASH_RE = /\\/g; -exports.DOUBLE_SLASH_RE = /\/\//; -exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/; -exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/; -exports.REPLACER_RE = /^\.[/\\]/; - -exports.SLASH = '/'; -exports.SLASH_SLASH = '//'; -exports.BRACE_START = '{'; -exports.BANG = '!'; -exports.ONE_DOT = '.'; -exports.TWO_DOTS = '..'; -exports.STAR = '*'; -exports.GLOBSTAR = '**'; -exports.ROOT_GLOBSTAR = '/**/*'; -exports.SLASH_GLOBSTAR = '/**'; -exports.DIR_SUFFIX = 
'Dir'; -exports.ANYMATCH_OPTS = {dot: true}; -exports.STRING_TYPE = 'string'; -exports.FUNCTION_TYPE = 'function'; -exports.EMPTY_STR = ''; -exports.EMPTY_FN = () => {}; -exports.IDENTITY_FN = val => val; - -exports.isWindows = platform === 'win32'; -exports.isMacos = platform === 'darwin'; -exports.isLinux = platform === 'linux'; -exports.isIBMi = os.type() === 'OS400'; diff --git a/node_modules/chokidar/lib/fsevents-handler.js b/node_modules/chokidar/lib/fsevents-handler.js deleted file mode 100644 index fe29393..0000000 --- a/node_modules/chokidar/lib/fsevents-handler.js +++ /dev/null @@ -1,526 +0,0 @@ -'use strict'; - -const fs = require('fs'); -const sysPath = require('path'); -const { promisify } = require('util'); - -let fsevents; -try { - fsevents = require('fsevents'); -} catch (error) { - if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error); -} - -if (fsevents) { - // TODO: real check - const mtch = process.version.match(/v(\d+)\.(\d+)/); - if (mtch && mtch[1] && mtch[2]) { - const maj = Number.parseInt(mtch[1], 10); - const min = Number.parseInt(mtch[2], 10); - if (maj === 8 && min < 16) { - fsevents = undefined; - } - } -} - -const { - EV_ADD, - EV_CHANGE, - EV_ADD_DIR, - EV_UNLINK, - EV_ERROR, - STR_DATA, - STR_END, - FSEVENT_CREATED, - FSEVENT_MODIFIED, - FSEVENT_DELETED, - FSEVENT_MOVED, - // FSEVENT_CLONED, - FSEVENT_UNKNOWN, - FSEVENT_FLAG_MUST_SCAN_SUBDIRS, - FSEVENT_TYPE_FILE, - FSEVENT_TYPE_DIRECTORY, - FSEVENT_TYPE_SYMLINK, - - ROOT_GLOBSTAR, - DIR_SUFFIX, - DOT_SLASH, - FUNCTION_TYPE, - EMPTY_FN, - IDENTITY_FN -} = require('./constants'); - -const Depth = (value) => isNaN(value) ? {} : {depth: value}; - -const stat = promisify(fs.stat); -const lstat = promisify(fs.lstat); -const realpath = promisify(fs.realpath); - -const statMethods = { stat, lstat }; - -/** - * @typedef {String} Path - */ - -/** - * @typedef {Object} FsEventsWatchContainer - * @property {Set} listeners - * @property {Function} rawEmitter - * @property {{stop: Function}} watcher - */ - -// fsevents instance helper functions -/** - * Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances) - * @type {Map} - */ -const FSEventsWatchers = new Map(); - -// Threshold of duplicate path prefixes at which to start -// consolidating going forward -const consolidateThreshhold = 10; - -const wrongEventFlags = new Set([ - 69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912 -]); - -/** - * Instantiates the fsevents interface - * @param {Path} path path to be watched - * @param {Function} callback called when fsevents is bound and ready - * @returns {{stop: Function}} new fsevents instance - */ -const createFSEventsInstance = (path, callback) => { - const stop = fsevents.watch(path, callback); - return {stop}; -}; - -/** - * Instantiates the fsevents interface or binds listeners to an existing one covering - * the same file tree. - * @param {Path} path - to be watched - * @param {Path} realPath - real path for symlinks - * @param {Function} listener - called when fsevents emits events - * @param {Function} rawEmitter - passes data to listeners of the 'raw' event - * @returns {Function} closer - */ -function setFSEventsListener(path, realPath, listener, rawEmitter) { - let watchPath = sysPath.extname(realPath) ? 
sysPath.dirname(realPath) : realPath; - - const parentPath = sysPath.dirname(watchPath); - let cont = FSEventsWatchers.get(watchPath); - - // If we've accumulated a substantial number of paths that - // could have been consolidated by watching one directory - // above the current one, create a watcher on the parent - // path instead, so that we do consolidate going forward. - if (couldConsolidate(parentPath)) { - watchPath = parentPath; - } - - const resolvedPath = sysPath.resolve(path); - const hasSymlink = resolvedPath !== realPath; - - const filteredListener = (fullPath, flags, info) => { - if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath); - if ( - fullPath === resolvedPath || - !fullPath.indexOf(resolvedPath + sysPath.sep) - ) listener(fullPath, flags, info); - }; - - // check if there is already a watcher on a parent path - // modifies `watchPath` to the parent path when it finds a match - let watchedParent = false; - for (const watchedPath of FSEventsWatchers.keys()) { - if (realPath.indexOf(sysPath.resolve(watchedPath) + sysPath.sep) === 0) { - watchPath = watchedPath; - cont = FSEventsWatchers.get(watchPath); - watchedParent = true; - break; - } - } - - if (cont || watchedParent) { - cont.listeners.add(filteredListener); - } else { - cont = { - listeners: new Set([filteredListener]), - rawEmitter, - watcher: createFSEventsInstance(watchPath, (fullPath, flags) => { - if (!cont.listeners.size) return; - if (flags & FSEVENT_FLAG_MUST_SCAN_SUBDIRS) return; - const info = fsevents.getInfo(fullPath, flags); - cont.listeners.forEach(list => { - list(fullPath, flags, info); - }); - - cont.rawEmitter(info.event, fullPath, info); - }) - }; - FSEventsWatchers.set(watchPath, cont); - } - - // removes this instance's listeners and closes the underlying fsevents - // instance if there are no more listeners left - return () => { - const lst = cont.listeners; - - lst.delete(filteredListener); - if (!lst.size) { - FSEventsWatchers.delete(watchPath); - if (cont.watcher) return cont.watcher.stop().then(() => { - cont.rawEmitter = cont.watcher = undefined; - Object.freeze(cont); - }); - } - }; -} - -// Decide whether or not we should start a new higher-level -// parent watcher -const couldConsolidate = (path) => { - let count = 0; - for (const watchPath of FSEventsWatchers.keys()) { - if (watchPath.indexOf(path) === 0) { - count++; - if (count >= consolidateThreshhold) { - return true; - } - } - } - - return false; -}; - -// returns boolean indicating whether fsevents can be used -const canUse = () => fsevents && FSEventsWatchers.size < 128; - -// determines subdirectory traversal levels from root to path -const calcDepth = (path, root) => { - let i = 0; - while (!path.indexOf(root) && (path = sysPath.dirname(path)) !== root) i++; - return i; -}; - -// returns boolean indicating whether the fsevents' event info has the same type -// as the one returned by fs.stat -const sameTypes = (info, stats) => ( - info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() || - info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() || - info.type === FSEVENT_TYPE_FILE && stats.isFile() -) - -/** - * @mixin - */ -class FsEventsHandler { - -/** - * @param {import('../index').FSWatcher} fsw - */ -constructor(fsw) { - this.fsw = fsw; -} -checkIgnored(path, stats) { - const ipaths = this.fsw._ignoredPaths; - if (this.fsw._isIgnored(path, stats)) { - ipaths.add(path); - if (stats && stats.isDirectory()) { - ipaths.add(path + ROOT_GLOBSTAR); - } - return true; - } - - ipaths.delete(path); - 
ipaths.delete(path + ROOT_GLOBSTAR); -} - -addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) { - const event = watchedDir.has(item) ? EV_CHANGE : EV_ADD; - this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts); -} - -async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) { - try { - const stats = await stat(path) - if (this.fsw.closed) return; - if (sameTypes(info, stats)) { - this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); - } else { - this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); - } - } catch (error) { - if (error.code === 'EACCES') { - this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); - } else { - this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); - } - } -} - -handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) { - if (this.fsw.closed || this.checkIgnored(path)) return; - - if (event === EV_UNLINK) { - const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY - // suppress unlink events on never before seen files - if (isDirectory || watchedDir.has(item)) { - this.fsw._remove(parent, item, isDirectory); - } - } else { - if (event === EV_ADD) { - // track new directories - if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path); - - if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) { - // push symlinks back to the top of the stack to get handled - const curDepth = opts.depth === undefined ? - undefined : calcDepth(fullPath, realPath) + 1; - return this._addToFsEvents(path, false, true, curDepth); - } - - // track new paths - // (other than symlinks being followed, which will be tracked soon) - this.fsw._getWatchedDir(parent).add(item); - } - /** - * @type {'add'|'addDir'|'unlink'|'unlinkDir'} - */ - const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event; - this.fsw._emit(eventName, path); - if (eventName === EV_ADD_DIR) this._addToFsEvents(path, false, true); - } -} - -/** - * Handle symlinks encountered during directory scan - * @param {String} watchPath - file/dir path to be watched with fsevents - * @param {String} realPath - real path (in case of symlinks) - * @param {Function} transform - path transformer - * @param {Function} globFilter - path filter in case a glob pattern was provided - * @returns {Function} closer for the watcher instance -*/ -_watchWithFsEvents(watchPath, realPath, transform, globFilter) { - if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return; - const opts = this.fsw.options; - const watchCallback = async (fullPath, flags, info) => { - if (this.fsw.closed) return; - if ( - opts.depth !== undefined && - calcDepth(fullPath, realPath) > opts.depth - ) return; - const path = transform(sysPath.join( - watchPath, sysPath.relative(watchPath, fullPath) - )); - if (globFilter && !globFilter(path)) return; - // ensure directories are tracked - const parent = sysPath.dirname(path); - const item = sysPath.basename(path); - const watchedDir = this.fsw._getWatchedDir( - info.type === FSEVENT_TYPE_DIRECTORY ? 
path : parent - ); - - // correct for wrong events emitted - if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) { - if (typeof opts.ignored === FUNCTION_TYPE) { - let stats; - try { - stats = await stat(path); - } catch (error) {} - if (this.fsw.closed) return; - if (this.checkIgnored(path, stats)) return; - if (sameTypes(info, stats)) { - this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); - } else { - this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts); - } - } else { - this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); - } - } else { - switch (info.event) { - case FSEVENT_CREATED: - case FSEVENT_MODIFIED: - return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts); - case FSEVENT_DELETED: - case FSEVENT_MOVED: - return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts); - } - } - }; - - const closer = setFSEventsListener( - watchPath, - realPath, - watchCallback, - this.fsw._emitRaw - ); - - this.fsw._emitReady(); - return closer; -} - -/** - * Handle symlinks encountered during directory scan - * @param {String} linkPath path to symlink - * @param {String} fullPath absolute path to the symlink - * @param {Function} transform pre-existing path transformer - * @param {Number} curDepth level of subdirectories traversed to where symlink is - * @returns {Promise} - */ -async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) { - // don't follow the same symlink more than once - if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return; - - this.fsw._symlinkPaths.set(fullPath, true); - this.fsw._incrReadyCount(); - - try { - const linkTarget = await realpath(linkPath); - if (this.fsw.closed) return; - if (this.fsw._isIgnored(linkTarget)) { - return this.fsw._emitReady(); - } - - this.fsw._incrReadyCount(); - - // add the linkTarget for watching with a wrapper for transform - // that causes emitted paths to incorporate the link's path - this._addToFsEvents(linkTarget || linkPath, (path) => { - let aliasedPath = linkPath; - if (linkTarget && linkTarget !== DOT_SLASH) { - aliasedPath = path.replace(linkTarget, linkPath); - } else if (path !== DOT_SLASH) { - aliasedPath = sysPath.join(linkPath, path); - } - return transform(aliasedPath); - }, false, curDepth); - } catch(error) { - if (this.fsw._handleError(error)) { - return this.fsw._emitReady(); - } - } -} - -/** - * - * @param {Path} newPath - * @param {fs.Stats} stats - */ -emitAdd(newPath, stats, processPath, opts, forceAdd) { - const pp = processPath(newPath); - const isDir = stats.isDirectory(); - const dirObj = this.fsw._getWatchedDir(sysPath.dirname(pp)); - const base = sysPath.basename(pp); - - // ensure empty dirs get tracked - if (isDir) this.fsw._getWatchedDir(pp); - if (dirObj.has(base)) return; - dirObj.add(base); - - if (!opts.ignoreInitial || forceAdd === true) { - this.fsw._emit(isDir ? 
EV_ADD_DIR : EV_ADD, pp, stats); - } -} - -initWatch(realPath, path, wh, processPath) { - if (this.fsw.closed) return; - const closer = this._watchWithFsEvents( - wh.watchPath, - sysPath.resolve(realPath || wh.watchPath), - processPath, - wh.globFilter - ); - this.fsw._addPathCloser(path, closer); -} - -/** - * Handle added path with fsevents - * @param {String} path file/dir path or glob pattern - * @param {Function|Boolean=} transform converts working path to what the user expects - * @param {Boolean=} forceAdd ensure add is emitted - * @param {Number=} priorDepth Level of subdirectories already traversed. - * @returns {Promise} - */ -async _addToFsEvents(path, transform, forceAdd, priorDepth) { - if (this.fsw.closed) { - return; - } - const opts = this.fsw.options; - const processPath = typeof transform === FUNCTION_TYPE ? transform : IDENTITY_FN; - - const wh = this.fsw._getWatchHelpers(path); - - // evaluate what is at the path we're being asked to watch - try { - const stats = await statMethods[wh.statMethod](wh.watchPath); - if (this.fsw.closed) return; - if (this.fsw._isIgnored(wh.watchPath, stats)) { - throw null; - } - if (stats.isDirectory()) { - // emit addDir unless this is a glob parent - if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd); - - // don't recurse further if it would exceed depth setting - if (priorDepth && priorDepth > opts.depth) return; - - // scan the contents of the dir - this.fsw._readdirp(wh.watchPath, { - fileFilter: entry => wh.filterPath(entry), - directoryFilter: entry => wh.filterDir(entry), - ...Depth(opts.depth - (priorDepth || 0)) - }).on(STR_DATA, (entry) => { - // need to check filterPath on dirs b/c filterDir is less restrictive - if (this.fsw.closed) { - return; - } - if (entry.stats.isDirectory() && !wh.filterPath(entry)) return; - - const joinedPath = sysPath.join(wh.watchPath, entry.path); - const {fullPath} = entry; - - if (wh.followSymlinks && entry.stats.isSymbolicLink()) { - // preserve the current depth here since it can't be derived from - // real paths past the symlink - const curDepth = opts.depth === undefined ? 
- undefined : calcDepth(joinedPath, sysPath.resolve(wh.watchPath)) + 1; - - this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth); - } else { - this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd); - } - }).on(EV_ERROR, EMPTY_FN).on(STR_END, () => { - this.fsw._emitReady(); - }); - } else { - this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd); - this.fsw._emitReady(); - } - } catch (error) { - if (!error || this.fsw._handleError(error)) { - // TODO: Strange thing: "should not choke on an ignored watch path" will be failed without 2 ready calls -__- - this.fsw._emitReady(); - this.fsw._emitReady(); - } - } - - if (opts.persistent && forceAdd !== true) { - if (typeof transform === FUNCTION_TYPE) { - // realpath has already been resolved - this.initWatch(undefined, path, wh, processPath); - } else { - let realPath; - try { - realPath = await realpath(wh.watchPath); - } catch (e) {} - this.initWatch(realPath, path, wh, processPath); - } - } -} - -} - -module.exports = FsEventsHandler; -module.exports.canUse = canUse; diff --git a/node_modules/chokidar/lib/nodefs-handler.js b/node_modules/chokidar/lib/nodefs-handler.js deleted file mode 100644 index 199cfe9..0000000 --- a/node_modules/chokidar/lib/nodefs-handler.js +++ /dev/null @@ -1,654 +0,0 @@ -'use strict'; - -const fs = require('fs'); -const sysPath = require('path'); -const { promisify } = require('util'); -const isBinaryPath = require('is-binary-path'); -const { - isWindows, - isLinux, - EMPTY_FN, - EMPTY_STR, - KEY_LISTENERS, - KEY_ERR, - KEY_RAW, - HANDLER_KEYS, - EV_CHANGE, - EV_ADD, - EV_ADD_DIR, - EV_ERROR, - STR_DATA, - STR_END, - BRACE_START, - STAR -} = require('./constants'); - -const THROTTLE_MODE_WATCH = 'watch'; - -const open = promisify(fs.open); -const stat = promisify(fs.stat); -const lstat = promisify(fs.lstat); -const close = promisify(fs.close); -const fsrealpath = promisify(fs.realpath); - -const statMethods = { lstat, stat }; - -// TODO: emit errors properly. Example: EMFILE on Macos. -const foreach = (val, fn) => { - if (val instanceof Set) { - val.forEach(fn); - } else { - fn(val); - } -}; - -const addAndConvert = (main, prop, item) => { - let container = main[prop]; - if (!(container instanceof Set)) { - main[prop] = container = new Set([container]); - } - container.add(item); -}; - -const clearItem = cont => key => { - const set = cont[key]; - if (set instanceof Set) { - set.clear(); - } else { - delete cont[key]; - } -}; - -const delFromSet = (main, prop, item) => { - const container = main[prop]; - if (container instanceof Set) { - container.delete(item); - } else if (container === item) { - delete main[prop]; - } -}; - -const isEmptySet = (val) => val instanceof Set ? 
val.size === 0 : !val; - -/** - * @typedef {String} Path - */ - -// fs_watch helpers - -// object to hold per-process fs_watch instances -// (may be shared across chokidar FSWatcher instances) - -/** - * @typedef {Object} FsWatchContainer - * @property {Set} listeners - * @property {Set} errHandlers - * @property {Set} rawEmitters - * @property {fs.FSWatcher=} watcher - * @property {Boolean=} watcherUnusable - */ - -/** - * @type {Map} - */ -const FsWatchInstances = new Map(); - -/** - * Instantiates the fs_watch interface - * @param {String} path to be watched - * @param {Object} options to be passed to fs_watch - * @param {Function} listener main event handler - * @param {Function} errHandler emits info about errors - * @param {Function} emitRaw emits raw event data - * @returns {fs.FSWatcher} new fsevents instance - */ -function createFsWatchInstance(path, options, listener, errHandler, emitRaw) { - const handleEvent = (rawEvent, evPath) => { - listener(path); - emitRaw(rawEvent, evPath, {watchedPath: path}); - - // emit based on events occurring for files from a directory's watcher in - // case the file's watcher misses it (and rely on throttling to de-dupe) - if (evPath && path !== evPath) { - fsWatchBroadcast( - sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath) - ); - } - }; - try { - return fs.watch(path, options, handleEvent); - } catch (error) { - errHandler(error); - } -} - -/** - * Helper for passing fs_watch event data to a collection of listeners - * @param {Path} fullPath absolute path bound to fs_watch instance - * @param {String} type listener type - * @param {*=} val1 arguments to be passed to listeners - * @param {*=} val2 - * @param {*=} val3 - */ -const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => { - const cont = FsWatchInstances.get(fullPath); - if (!cont) return; - foreach(cont[type], (listener) => { - listener(val1, val2, val3); - }); -}; - -/** - * Instantiates the fs_watch interface or binds listeners - * to an existing one covering the same file system entry - * @param {String} path - * @param {String} fullPath absolute path - * @param {Object} options to be passed to fs_watch - * @param {Object} handlers container for event listener functions - */ -const setFsWatchListener = (path, fullPath, options, handlers) => { - const {listener, errHandler, rawEmitter} = handlers; - let cont = FsWatchInstances.get(fullPath); - - /** @type {fs.FSWatcher=} */ - let watcher; - if (!options.persistent) { - watcher = createFsWatchInstance( - path, options, listener, errHandler, rawEmitter - ); - return watcher.close.bind(watcher); - } - if (cont) { - addAndConvert(cont, KEY_LISTENERS, listener); - addAndConvert(cont, KEY_ERR, errHandler); - addAndConvert(cont, KEY_RAW, rawEmitter); - } else { - watcher = createFsWatchInstance( - path, - options, - fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), - errHandler, // no need to use broadcast here - fsWatchBroadcast.bind(null, fullPath, KEY_RAW) - ); - if (!watcher) return; - watcher.on(EV_ERROR, async (error) => { - const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR); - cont.watcherUnusable = true; // documented since Node 10.4.1 - // Workaround for https://github.com/joyent/node/issues/4337 - if (isWindows && error.code === 'EPERM') { - try { - const fd = await open(path, 'r'); - await close(fd); - broadcastErr(error); - } catch (err) {} - } else { - broadcastErr(error); - } - }); - cont = { - listeners: listener, - errHandlers: errHandler, - rawEmitters: rawEmitter, - watcher 
- }; - FsWatchInstances.set(fullPath, cont); - } - // const index = cont.listeners.indexOf(listener); - - // removes this instance's listeners and closes the underlying fs_watch - // instance if there are no more listeners left - return () => { - delFromSet(cont, KEY_LISTENERS, listener); - delFromSet(cont, KEY_ERR, errHandler); - delFromSet(cont, KEY_RAW, rawEmitter); - if (isEmptySet(cont.listeners)) { - // Check to protect against issue gh-730. - // if (cont.watcherUnusable) { - cont.watcher.close(); - // } - FsWatchInstances.delete(fullPath); - HANDLER_KEYS.forEach(clearItem(cont)); - cont.watcher = undefined; - Object.freeze(cont); - } - }; -}; - -// fs_watchFile helpers - -// object to hold per-process fs_watchFile instances -// (may be shared across chokidar FSWatcher instances) -const FsWatchFileInstances = new Map(); - -/** - * Instantiates the fs_watchFile interface or binds listeners - * to an existing one covering the same file system entry - * @param {String} path to be watched - * @param {String} fullPath absolute path - * @param {Object} options options to be passed to fs_watchFile - * @param {Object} handlers container for event listener functions - * @returns {Function} closer - */ -const setFsWatchFileListener = (path, fullPath, options, handlers) => { - const {listener, rawEmitter} = handlers; - let cont = FsWatchFileInstances.get(fullPath); - - /* eslint-disable no-unused-vars, prefer-destructuring */ - let listeners = new Set(); - let rawEmitters = new Set(); - - const copts = cont && cont.options; - if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) { - // "Upgrade" the watcher to persistence or a quicker interval. - // This creates some unlikely edge case issues if the user mixes - // settings in a very weird way, but solving for those cases - // doesn't seem worthwhile for the added complexity. - listeners = cont.listeners; - rawEmitters = cont.rawEmitters; - fs.unwatchFile(fullPath); - cont = undefined; - } - - /* eslint-enable no-unused-vars, prefer-destructuring */ - - if (cont) { - addAndConvert(cont, KEY_LISTENERS, listener); - addAndConvert(cont, KEY_RAW, rawEmitter); - } else { - // TODO - // listeners.add(listener); - // rawEmitters.add(rawEmitter); - cont = { - listeners: listener, - rawEmitters: rawEmitter, - options, - watcher: fs.watchFile(fullPath, options, (curr, prev) => { - foreach(cont.rawEmitters, (rawEmitter) => { - rawEmitter(EV_CHANGE, fullPath, {curr, prev}); - }); - const currmtime = curr.mtimeMs; - if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) { - foreach(cont.listeners, (listener) => listener(path, curr)); - } - }) - }; - FsWatchFileInstances.set(fullPath, cont); - } - // const index = cont.listeners.indexOf(listener); - - // Removes this instance's listeners and closes the underlying fs_watchFile - // instance if there are no more listeners left. - return () => { - delFromSet(cont, KEY_LISTENERS, listener); - delFromSet(cont, KEY_RAW, rawEmitter); - if (isEmptySet(cont.listeners)) { - FsWatchFileInstances.delete(fullPath); - fs.unwatchFile(fullPath); - cont.options = cont.watcher = undefined; - Object.freeze(cont); - } - }; -}; - -/** - * @mixin - */ -class NodeFsHandler { - -/** - * @param {import("../index").FSWatcher} fsW - */ -constructor(fsW) { - this.fsw = fsW; - this._boundHandleError = (error) => fsW._handleError(error); -} - -/** - * Watch file for changes with fs_watchFile or fs_watch. 
- * @param {String} path to file or dir - * @param {Function} listener on fs change - * @returns {Function} closer for the watcher instance - */ -_watchWithNodeFs(path, listener) { - const opts = this.fsw.options; - const directory = sysPath.dirname(path); - const basename = sysPath.basename(path); - const parent = this.fsw._getWatchedDir(directory); - parent.add(basename); - const absolutePath = sysPath.resolve(path); - const options = {persistent: opts.persistent}; - if (!listener) listener = EMPTY_FN; - - let closer; - if (opts.usePolling) { - options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ? - opts.binaryInterval : opts.interval; - closer = setFsWatchFileListener(path, absolutePath, options, { - listener, - rawEmitter: this.fsw._emitRaw - }); - } else { - closer = setFsWatchListener(path, absolutePath, options, { - listener, - errHandler: this._boundHandleError, - rawEmitter: this.fsw._emitRaw - }); - } - return closer; -} - -/** - * Watch a file and emit add event if warranted. - * @param {Path} file Path - * @param {fs.Stats} stats result of fs_stat - * @param {Boolean} initialAdd was the file added at watch instantiation? - * @returns {Function} closer for the watcher instance - */ -_handleFile(file, stats, initialAdd) { - if (this.fsw.closed) { - return; - } - const dirname = sysPath.dirname(file); - const basename = sysPath.basename(file); - const parent = this.fsw._getWatchedDir(dirname); - // stats is always present - let prevStats = stats; - - // if the file is already being watched, do nothing - if (parent.has(basename)) return; - - const listener = async (path, newStats) => { - if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return; - if (!newStats || newStats.mtimeMs === 0) { - try { - const newStats = await stat(file); - if (this.fsw.closed) return; - // Check that change event was not fired because of changed only accessTime. - const at = newStats.atimeMs; - const mt = newStats.mtimeMs; - if (!at || at <= mt || mt !== prevStats.mtimeMs) { - this.fsw._emit(EV_CHANGE, file, newStats); - } - if (isLinux && prevStats.ino !== newStats.ino) { - this.fsw._closeFile(path) - prevStats = newStats; - this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener)); - } else { - prevStats = newStats; - } - } catch (error) { - // Fix issues where mtime is null but file is still present - this.fsw._remove(dirname, basename); - } - // add is about to be emitted if file not already tracked in parent - } else if (parent.has(basename)) { - // Check that change event was not fired because of changed only accessTime. - const at = newStats.atimeMs; - const mt = newStats.mtimeMs; - if (!at || at <= mt || mt !== prevStats.mtimeMs) { - this.fsw._emit(EV_CHANGE, file, newStats); - } - prevStats = newStats; - } - } - // kick off the watcher - const closer = this._watchWithNodeFs(file, listener); - - // emit an add event if we're supposed to - if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) { - if (!this.fsw._throttle(EV_ADD, file, 0)) return; - this.fsw._emit(EV_ADD, file, stats); - } - - return closer; -} - -/** - * Handle symlinks encountered while reading a dir. - * @param {Object} entry returned by readdirp - * @param {String} directory path of dir being read - * @param {String} path of this item - * @param {String} item basename of this item - * @returns {Promise} true if no more processing is needed for this entry. 
- */ -async _handleSymlink(entry, directory, path, item) { - if (this.fsw.closed) { - return; - } - const full = entry.fullPath; - const dir = this.fsw._getWatchedDir(directory); - - if (!this.fsw.options.followSymlinks) { - // watch symlink directly (don't follow) and detect changes - this.fsw._incrReadyCount(); - - let linkPath; - try { - linkPath = await fsrealpath(path); - } catch (e) { - this.fsw._emitReady(); - return true; - } - - if (this.fsw.closed) return; - if (dir.has(item)) { - if (this.fsw._symlinkPaths.get(full) !== linkPath) { - this.fsw._symlinkPaths.set(full, linkPath); - this.fsw._emit(EV_CHANGE, path, entry.stats); - } - } else { - dir.add(item); - this.fsw._symlinkPaths.set(full, linkPath); - this.fsw._emit(EV_ADD, path, entry.stats); - } - this.fsw._emitReady(); - return true; - } - - // don't follow the same symlink more than once - if (this.fsw._symlinkPaths.has(full)) { - return true; - } - - this.fsw._symlinkPaths.set(full, true); -} - -_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) { - // Normalize the directory name on Windows - directory = sysPath.join(directory, EMPTY_STR); - - if (!wh.hasGlob) { - throttler = this.fsw._throttle('readdir', directory, 1000); - if (!throttler) return; - } - - const previous = this.fsw._getWatchedDir(wh.path); - const current = new Set(); - - let stream = this.fsw._readdirp(directory, { - fileFilter: entry => wh.filterPath(entry), - directoryFilter: entry => wh.filterDir(entry), - depth: 0 - }).on(STR_DATA, async (entry) => { - if (this.fsw.closed) { - stream = undefined; - return; - } - const item = entry.path; - let path = sysPath.join(directory, item); - current.add(item); - - if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) { - return; - } - - if (this.fsw.closed) { - stream = undefined; - return; - } - // Files that present in current directory snapshot - // but absent in previous are added to watch list and - // emit `add` event. - if (item === target || !target && !previous.has(item)) { - this.fsw._incrReadyCount(); - - // ensure relativeness of path is preserved in case of watcher reuse - path = sysPath.join(dir, sysPath.relative(dir, path)); - - this._addToNodeFs(path, initialAdd, wh, depth + 1); - } - }).on(EV_ERROR, this._boundHandleError); - - return new Promise(resolve => - stream.once(STR_END, () => { - if (this.fsw.closed) { - stream = undefined; - return; - } - const wasThrottled = throttler ? throttler.clear() : false; - - resolve(); - - // Files that absent in current directory snapshot - // but present in previous emit `remove` event - // and are removed from @watched[directory]. - previous.getChildren().filter((item) => { - return item !== directory && - !current.has(item) && - // in case of intersecting globs; - // a path may have been filtered out of this readdir, but - // shouldn't be removed because it matches a different glob - (!wh.hasGlob || wh.filterPath({ - fullPath: sysPath.resolve(directory, item) - })); - }).forEach((item) => { - this.fsw._remove(directory, item); - }); - - stream = undefined; - - // one more time for any missed in case changes came in extremely quickly - if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler); - }) - ); -} - -/** - * Read directory to add / remove files from `@watched` list and re-read it on change. 
- * @param {String} dir fs path - * @param {fs.Stats} stats - * @param {Boolean} initialAdd - * @param {Number} depth relative to user-supplied path - * @param {String} target child path targeted for watch - * @param {Object} wh Common watch helpers for this path - * @param {String} realpath - * @returns {Promise} closer for the watcher instance. - */ -async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) { - const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir)); - const tracked = parentDir.has(sysPath.basename(dir)); - if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) { - if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR, dir, stats); - } - - // ensure dir is tracked (harmless if redundant) - parentDir.add(sysPath.basename(dir)); - this.fsw._getWatchedDir(dir); - let throttler; - let closer; - - const oDepth = this.fsw.options.depth; - if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) { - if (!target) { - await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler); - if (this.fsw.closed) return; - } - - closer = this._watchWithNodeFs(dir, (dirPath, stats) => { - // if current directory is removed, do nothing - if (stats && stats.mtimeMs === 0) return; - - this._handleRead(dirPath, false, wh, target, dir, depth, throttler); - }); - } - return closer; -} - -/** - * Handle added file, directory, or glob pattern. - * Delegates call to _handleFile / _handleDir after checks. - * @param {String} path to file or ir - * @param {Boolean} initialAdd was the file added at watch instantiation? - * @param {Object} priorWh depth relative to user-supplied path - * @param {Number} depth Child path actually targeted for watch - * @param {String=} target Child path actually targeted for watch - * @returns {Promise} - */ -async _addToNodeFs(path, initialAdd, priorWh, depth, target) { - const ready = this.fsw._emitReady; - if (this.fsw._isIgnored(path) || this.fsw.closed) { - ready(); - return false; - } - - const wh = this.fsw._getWatchHelpers(path, depth); - if (!wh.hasGlob && priorWh) { - wh.hasGlob = priorWh.hasGlob; - wh.globFilter = priorWh.globFilter; - wh.filterPath = entry => priorWh.filterPath(entry); - wh.filterDir = entry => priorWh.filterDir(entry); - } - - // evaluate what is at the path we're being asked to watch - try { - const stats = await statMethods[wh.statMethod](wh.watchPath); - if (this.fsw.closed) return; - if (this.fsw._isIgnored(wh.watchPath, stats)) { - ready(); - return false; - } - - const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START); - let closer; - if (stats.isDirectory()) { - const absPath = sysPath.resolve(path); - const targetPath = follow ? await fsrealpath(path) : path; - if (this.fsw.closed) return; - closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath); - if (this.fsw.closed) return; - // preserve this symlink's target path - if (absPath !== targetPath && targetPath !== undefined) { - this.fsw._symlinkPaths.set(absPath, targetPath); - } - } else if (stats.isSymbolicLink()) { - const targetPath = follow ? 
await fsrealpath(path) : path; - if (this.fsw.closed) return; - const parent = sysPath.dirname(wh.watchPath); - this.fsw._getWatchedDir(parent).add(wh.watchPath); - this.fsw._emit(EV_ADD, wh.watchPath, stats); - closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath); - if (this.fsw.closed) return; - - // preserve this symlink's target path - if (targetPath !== undefined) { - this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath); - } - } else { - closer = this._handleFile(wh.watchPath, stats, initialAdd); - } - ready(); - - this.fsw._addPathCloser(path, closer); - return false; - - } catch (error) { - if (this.fsw._handleError(error)) { - ready(); - return path; - } - } -} - -} - -module.exports = NodeFsHandler; diff --git a/node_modules/chokidar/package.json b/node_modules/chokidar/package.json deleted file mode 100644 index e8f8b3d..0000000 --- a/node_modules/chokidar/package.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "name": "chokidar", - "description": "Minimal and efficient cross-platform file watching library", - "version": "3.6.0", - "homepage": "https://github.com/paulmillr/chokidar", - "author": "Paul Miller (https://paulmillr.com)", - "contributors": [ - "Paul Miller (https://paulmillr.com)", - "Elan Shanker" - ], - "engines": { - "node": ">= 8.10.0" - }, - "main": "index.js", - "types": "./types/index.d.ts", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - }, - "devDependencies": { - "@types/node": "^14", - "chai": "^4.3", - "dtslint": "^3.3.0", - "eslint": "^7.0.0", - "mocha": "^7.0.0", - "rimraf": "^3.0.0", - "sinon": "^9.0.1", - "sinon-chai": "^3.3.0", - "typescript": "^4.4.3", - "upath": "^1.2.0" - }, - "files": [ - "index.js", - "lib/*.js", - "types/index.d.ts" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/paulmillr/chokidar.git" - }, - "bugs": { - "url": "https://github.com/paulmillr/chokidar/issues" - }, - "license": "MIT", - "scripts": { - "dtslint": "dtslint types", - "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .", - "build": "npm ls", - "mocha": "mocha --exit --timeout 90000", - "test": "npm run lint && npm run mocha" - }, - "keywords": [ - "fs", - "watch", - "watchFile", - "watcher", - "watching", - "file", - "fsevents" - ], - "funding": "https://paulmillr.com/funding/" -} diff --git a/node_modules/chokidar/types/index.d.ts b/node_modules/chokidar/types/index.d.ts deleted file mode 100644 index 4558066..0000000 --- a/node_modules/chokidar/types/index.d.ts +++ /dev/null @@ -1,192 +0,0 @@ -// TypeScript Version: 3.0 - -/// <reference types="node" /> - -import * as fs from "fs"; -import { EventEmitter } from "events"; -import { Matcher } from 'anymatch'; - -export class FSWatcher extends EventEmitter implements fs.FSWatcher { - options: WatchOptions; - - /** - * Constructs a new FSWatcher instance with optional WatchOptions parameter. - */ - constructor(options?: WatchOptions); - - /** - * Add files, directories, or glob patterns for tracking. Takes an array of strings or just one - * string. - */ - add(paths: string | ReadonlyArray<string>): this; - - /** - * Stop watching files, directories, or glob patterns. Takes an array of strings or just one - * string.
- */ - unwatch(paths: string | ReadonlyArray<string>): this; - - /** - * Returns an object representing all the paths on the file system being watched by this - * `FSWatcher` instance. The object's keys are all the directories (using absolute paths unless - * the `cwd` option was used), and the values are arrays of the names of the items contained in - * each directory. - */ - getWatched(): { - [directory: string]: string[]; - }; - - /** - * Removes all listeners from watched files. - */ - close(): Promise<void>; - - on(event: 'add'|'addDir'|'change', listener: (path: string, stats?: fs.Stats) => void): this; - - on(event: 'all', listener: (eventName: 'add'|'addDir'|'change'|'unlink'|'unlinkDir', path: string, stats?: fs.Stats) => void): this; - - /** - * Error occurred - */ - on(event: 'error', listener: (error: Error) => void): this; - - /** - * Exposes the native Node `fs.FSWatcher events` - */ - on(event: 'raw', listener: (eventName: string, path: string, details: any) => void): this; - - /** - * Fires when the initial scan is complete - */ - on(event: 'ready', listener: () => void): this; - - on(event: 'unlink'|'unlinkDir', listener: (path: string) => void): this; - - on(event: string, listener: (...args: any[]) => void): this; - - ref(): this; - - unref(): this; -} - -export interface WatchOptions { - /** - * Indicates whether the process should continue to run as long as files are being watched. If - * set to `false` when using `fsevents` to watch, no more events will be emitted after `ready`, - * even if the process continues to run. - */ - persistent?: boolean; - - /** - * ([anymatch](https://github.com/micromatch/anymatch)-compatible definition) Defines files/paths to - * be ignored. The whole relative or absolute path is tested, not just filename. If a function - * with two arguments is provided, it gets called twice per path - once with a single argument - * (the path), second time with two arguments (the path and the - * [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object of that path). - */ - ignored?: Matcher; - - /** - * If set to `false` then `add`/`addDir` events are also emitted for matching paths while - * instantiating the watching as chokidar discovers these file paths (before the `ready` event). - */ - ignoreInitial?: boolean; - - /** - * When `false`, only the symlinks themselves will be watched for changes instead of following - * the link references and bubbling events through the link's path. - */ - followSymlinks?: boolean; - - /** - * The base directory from which watch `paths` are to be derived. Paths emitted with events will - * be relative to this. - */ - cwd?: string; - - /** - * If set to true then the strings passed to .watch() and .add() are treated as literal path - * names, even if they look like globs. Default: false. - */ - disableGlobbing?: boolean; - - /** - * Whether to use fs.watchFile (backed by polling), or fs.watch. If polling leads to high CPU - * utilization, consider setting this to `false`. It is typically necessary to **set this to - * `true` to successfully watch files over a network**, and it may be necessary to successfully - * watch files in other non-standard situations. Setting to `true` explicitly on OS X overrides - * the `useFsEvents` default. - */ - usePolling?: boolean; - - /** - * Whether to use the `fsevents` watching interface if available. When set to `true` explicitly - * and `fsevents` is available this supercedes the `usePolling` setting. When set to `false` on - * OS X, `usePolling: true` becomes the default.
- */ - useFsEvents?: boolean; - - /** - * If relying upon the [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object that - * may get passed with `add`, `addDir`, and `change` events, set this to `true` to ensure it is - * provided even in cases where it wasn't already available from the underlying watch events. - */ - alwaysStat?: boolean; - - /** - * If set, limits how many levels of subdirectories will be traversed. - */ - depth?: number; - - /** - * Interval of file system polling. - */ - interval?: number; - - /** - * Interval of file system polling for binary files. ([see list of binary extensions](https://gi - * thub.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json)) - */ - binaryInterval?: number; - - /** - * Indicates whether to watch files that don't have read permissions if possible. If watching - * fails due to `EPERM` or `EACCES` with this set to `true`, the errors will be suppressed - * silently. - */ - ignorePermissionErrors?: boolean; - - /** - * `true` if `useFsEvents` and `usePolling` are `false`). Automatically filters out artifacts - * that occur when using editors that use "atomic writes" instead of writing directly to the - * source file. If a file is re-added within 100 ms of being deleted, Chokidar emits a `change` - * event rather than `unlink` then `add`. If the default of 100 ms does not work well for you, - * you can override it by setting `atomic` to a custom value, in milliseconds. - */ - atomic?: boolean | number; - - /** - * can be set to an object in order to adjust timing params: - */ - awaitWriteFinish?: AwaitWriteFinishOptions | boolean; -} - -export interface AwaitWriteFinishOptions { - /** - * Amount of time in milliseconds for a file size to remain constant before emitting its event. - */ - stabilityThreshold?: number; - - /** - * File size polling interval. - */ - pollInterval?: number; -} - -/** - * produces an instance of `FSWatcher`. - */ -export function watch( - paths: string | ReadonlyArray<string>, - options?: WatchOptions -): FSWatcher; diff --git a/node_modules/cliui/CHANGELOG.md b/node_modules/cliui/CHANGELOG.md deleted file mode 100644 index 61f06c3..0000000 --- a/node_modules/cliui/CHANGELOG.md +++ /dev/null @@ -1,139 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
- -## [8.0.1](https://github.com/yargs/cliui/compare/v8.0.0...v8.0.1) (2022-10-01) - - -### Bug Fixes - -* **deps:** move rollup-plugin-ts to dev deps ([#124](https://github.com/yargs/cliui/issues/124)) ([7c8bd6b](https://github.com/yargs/cliui/commit/7c8bd6ba024d61e4eeae310c7959ab8ab6829081)) - -## [8.0.0](https://github.com/yargs/cliui/compare/v7.0.4...v8.0.0) (2022-09-30) - - -### ⚠ BREAKING CHANGES - -* **deps:** drop Node 10 to release CVE-2021-3807 patch (#122) - -### Bug Fixes - -* **deps:** drop Node 10 to release CVE-2021-3807 patch ([#122](https://github.com/yargs/cliui/issues/122)) ([f156571](https://github.com/yargs/cliui/commit/f156571ce4f2ebf313335e3a53ad905589da5a30)) - -### [7.0.4](https://www.github.com/yargs/cliui/compare/v7.0.3...v7.0.4) (2020-11-08) - - -### Bug Fixes - -* **deno:** import UIOptions from definitions ([#97](https://www.github.com/yargs/cliui/issues/97)) ([f04f343](https://www.github.com/yargs/cliui/commit/f04f3439bc78114c7e90f82ff56f5acf16268ea8)) - -### [7.0.3](https://www.github.com/yargs/cliui/compare/v7.0.2...v7.0.3) (2020-10-16) - - -### Bug Fixes - -* **exports:** node 13.0 and 13.1 require the dotted object form _with_ a string fallback ([#93](https://www.github.com/yargs/cliui/issues/93)) ([eca16fc](https://www.github.com/yargs/cliui/commit/eca16fc05d26255df3280906c36d7f0e5b05c6e9)) - -### [7.0.2](https://www.github.com/yargs/cliui/compare/v7.0.1...v7.0.2) (2020-10-14) - - -### Bug Fixes - -* **exports:** node 13.0-13.6 require a string fallback ([#91](https://www.github.com/yargs/cliui/issues/91)) ([b529d7e](https://www.github.com/yargs/cliui/commit/b529d7e432901af1af7848b23ed6cf634497d961)) - -### [7.0.1](https://www.github.com/yargs/cliui/compare/v7.0.0...v7.0.1) (2020-08-16) - - -### Bug Fixes - -* **build:** main should be build/index.cjs ([dc29a3c](https://www.github.com/yargs/cliui/commit/dc29a3cc617a410aa850e06337b5954b04f2cb4d)) - -## [7.0.0](https://www.github.com/yargs/cliui/compare/v6.0.0...v7.0.0) (2020-08-16) - - -### ⚠ BREAKING CHANGES - -* tsc/ESM/Deno support (#82) -* modernize deps and build (#80) - -### Build System - -* modernize deps and build ([#80](https://www.github.com/yargs/cliui/issues/80)) ([339d08d](https://www.github.com/yargs/cliui/commit/339d08dc71b15a3928aeab09042af94db2f43743)) - - -### Code Refactoring - -* tsc/ESM/Deno support ([#82](https://www.github.com/yargs/cliui/issues/82)) ([4b777a5](https://www.github.com/yargs/cliui/commit/4b777a5fe01c5d8958c6708695d6aab7dbe5706c)) - -## [6.0.0](https://www.github.com/yargs/cliui/compare/v5.0.0...v6.0.0) (2019-11-10) - - -### ⚠ BREAKING CHANGES - -* update deps, drop Node 6 - -### Code Refactoring - -* update deps, drop Node 6 ([62056df](https://www.github.com/yargs/cliui/commit/62056df)) - -## [5.0.0](https://github.com/yargs/cliui/compare/v4.1.0...v5.0.0) (2019-04-10) - - -### Bug Fixes - -* Update wrap-ansi to fix compatibility with latest versions of chalk. ([#60](https://github.com/yargs/cliui/issues/60)) ([7bf79ae](https://github.com/yargs/cliui/commit/7bf79ae)) - - -### BREAKING CHANGES - -* Drop support for node < 6. 
- - - - -## [4.1.0](https://github.com/yargs/cliui/compare/v4.0.0...v4.1.0) (2018-04-23) - - -### Features - -* add resetOutput method ([#57](https://github.com/yargs/cliui/issues/57)) ([7246902](https://github.com/yargs/cliui/commit/7246902)) - - - - -## [4.0.0](https://github.com/yargs/cliui/compare/v3.2.0...v4.0.0) (2017-12-18) - - -### Bug Fixes - -* downgrades strip-ansi to version 3.0.1 ([#54](https://github.com/yargs/cliui/issues/54)) ([5764c46](https://github.com/yargs/cliui/commit/5764c46)) -* set env variable FORCE_COLOR. ([#56](https://github.com/yargs/cliui/issues/56)) ([7350e36](https://github.com/yargs/cliui/commit/7350e36)) - - -### Chores - -* drop support for node < 4 ([#53](https://github.com/yargs/cliui/issues/53)) ([b105376](https://github.com/yargs/cliui/commit/b105376)) - - -### Features - -* add fallback for window width ([#45](https://github.com/yargs/cliui/issues/45)) ([d064922](https://github.com/yargs/cliui/commit/d064922)) - - -### BREAKING CHANGES - -* officially drop support for Node < 4 - - - - -## [3.2.0](https://github.com/yargs/cliui/compare/v3.1.2...v3.2.0) (2016-04-11) - - -### Bug Fixes - -* reduces tarball size ([acc6c33](https://github.com/yargs/cliui/commit/acc6c33)) - -### Features - -* adds standard-version for release management ([ff84e32](https://github.com/yargs/cliui/commit/ff84e32)) diff --git a/node_modules/cliui/LICENSE.txt b/node_modules/cliui/LICENSE.txt deleted file mode 100644 index c7e2747..0000000 --- a/node_modules/cliui/LICENSE.txt +++ /dev/null @@ -1,14 +0,0 @@ -Copyright (c) 2015, Contributors - -Permission to use, copy, modify, and/or distribute this software -for any purpose with or without fee is hereby granted, provided -that the above copyright notice and this permission notice -appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE -LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES -OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, -ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cliui/README.md b/node_modules/cliui/README.md deleted file mode 100644 index 65b5672..0000000 --- a/node_modules/cliui/README.md +++ /dev/null @@ -1,141 +0,0 @@ -# cliui - -![ci](https://github.com/yargs/cliui/workflows/ci/badge.svg) -[![NPM version](https://img.shields.io/npm/v/cliui.svg)](https://www.npmjs.com/package/cliui) -[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org) -![nycrc config on GitHub](https://img.shields.io/nycrc/yargs/cliui) - -easily create complex multi-column command-line-interfaces. - -## Example - -```js -const ui = require('cliui')() - -ui.div('Usage: $0 [command] [options]') - -ui.div({ - text: 'Options:', - padding: [2, 0, 1, 0] -}) - -ui.div( - { - text: "-f, --file", - width: 20, - padding: [0, 4, 0, 4] - }, - { - text: "the file to load." 
+ - chalk.green("(if this description is long it wraps).") - , - width: 20 - }, - { - text: chalk.red("[required]"), - align: 'right' - } -) - -console.log(ui.toString()) -``` - -## Deno/ESM Support - -As of `v7` `cliui` supports [Deno](https://github.com/denoland/deno) and -[ESM](https://nodejs.org/api/esm.html#esm_ecmascript_modules): - -```typescript -import cliui from "https://deno.land/x/cliui/deno.ts"; - -const ui = cliui({}) - -ui.div('Usage: $0 [command] [options]') - -ui.div({ - text: 'Options:', - padding: [2, 0, 1, 0] -}) - -ui.div({ - text: "-f, --file", - width: 20, - padding: [0, 4, 0, 4] -}) - -console.log(ui.toString()) -``` - - - -## Layout DSL - -cliui exposes a simple layout DSL: - -If you create a single `ui.div`, passing a string rather than an -object: - -* `\n`: characters will be interpreted as new rows. -* `\t`: characters will be interpreted as new columns. -* `\s`: characters will be interpreted as padding. - -**as an example...** - -```js -var ui = require('./')({ - width: 60 -}) - -ui.div( - 'Usage: node ./bin/foo.js\n' + - ' \t provide a regex\n' + - ' \t provide a glob\t [required]' -) - -console.log(ui.toString()) -``` - -**will output:** - -```shell -Usage: node ./bin/foo.js - provide a regex - provide a glob [required] -``` - -## Methods - -```js -cliui = require('cliui') -``` - -### cliui({width: integer}) - -Specify the maximum width of the UI being generated. -If no width is provided, cliui will try to get the current window's width and use it, and if that doesn't work, width will be set to `80`. - -### cliui({wrap: boolean}) - -Enable or disable the wrapping of text in a column. - -### cliui.div(column, column, column) - -Create a row with any number of columns, a column -can either be a string, or an object with the following -options: - -* **text:** some text to place in the column. -* **width:** the width of a column. -* **align:** alignment, `right` or `center`. -* **padding:** `[top, right, bottom, left]`. -* **border:** should a border be placed around the div? - -### cliui.span(column, column, column) - -Similar to `div`, except the next row will be appended without -a new line being created. - -### cliui.resetOutput() - -Resets the UI elements of the current cliui instance, maintaining the values -set for `width` and `wrap`. diff --git a/node_modules/cliui/build/index.cjs b/node_modules/cliui/build/index.cjs deleted file mode 100644 index 82126b6..0000000 --- a/node_modules/cliui/build/index.cjs +++ /dev/null @@ -1,302 +0,0 @@ -'use strict'; - -const align = { - right: alignRight, - center: alignCenter -}; -const top = 0; -const right = 1; -const bottom = 2; -const left = 3; -class UI { - constructor(opts) { - var _a; - this.width = opts.width; - this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? 
_a : true; - this.rows = []; - } - span(...args) { - const cols = this.div(...args); - cols.span = true; - } - resetOutput() { - this.rows = []; - } - div(...args) { - if (args.length === 0) { - this.div(''); - } - if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') { - return this.applyLayoutDSL(args[0]); - } - const cols = args.map(arg => { - if (typeof arg === 'string') { - return this.colFromString(arg); - } - return arg; - }); - this.rows.push(cols); - return cols; - } - shouldApplyLayoutDSL(...args) { - return args.length === 1 && typeof args[0] === 'string' && - /[\t\n]/.test(args[0]); - } - applyLayoutDSL(str) { - const rows = str.split('\n').map(row => row.split('\t')); - let leftColumnWidth = 0; - // simple heuristic for layout, make sure the - // second column lines up along the left-hand. - // don't allow the first column to take up more - // than 50% of the screen. - rows.forEach(columns => { - if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) { - leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0])); - } - }); - // generate a table: - // replacing ' ' with padding calculations. - // using the algorithmically generated width. - rows.forEach(columns => { - this.div(...columns.map((r, i) => { - return { - text: r.trim(), - padding: this.measurePadding(r), - width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined - }; - })); - }); - return this.rows[this.rows.length - 1]; - } - colFromString(text) { - return { - text, - padding: this.measurePadding(text) - }; - } - measurePadding(str) { - // measure padding without ansi escape codes - const noAnsi = mixin.stripAnsi(str); - return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length]; - } - toString() { - const lines = []; - this.rows.forEach(row => { - this.rowToString(row, lines); - }); - // don't display any lines with the - // hidden flag set. - return lines - .filter(line => !line.hidden) - .map(line => line.text) - .join('\n'); - } - rowToString(row, lines) { - this.rasterize(row).forEach((rrow, r) => { - let str = ''; - rrow.forEach((col, c) => { - const { width } = row[c]; // the width with padding. - const wrapWidth = this.negatePadding(row[c]); // the width without padding. - let ts = col; // temporary string used during alignment/padding. - if (wrapWidth > mixin.stringWidth(col)) { - ts += ' '.repeat(wrapWidth - mixin.stringWidth(col)); - } - // align the string within its column. - if (row[c].align && row[c].align !== 'left' && this.wrap) { - const fn = align[row[c].align]; - ts = fn(ts, wrapWidth); - if (mixin.stringWidth(ts) < wrapWidth) { - ts += ' '.repeat((width || 0) - mixin.stringWidth(ts) - 1); - } - } - // apply border and padding to string. - const padding = row[c].padding || [0, 0, 0, 0]; - if (padding[left]) { - str += ' '.repeat(padding[left]); - } - str += addBorder(row[c], ts, '| '); - str += ts; - str += addBorder(row[c], ts, ' |'); - if (padding[right]) { - str += ' '.repeat(padding[right]); - } - // if prior row is span, try to render the - // current row on the prior line. - if (r === 0 && lines.length > 0) { - str = this.renderInline(str, lines[lines.length - 1]); - } - }); - // remove trailing whitespace. - lines.push({ - text: str.replace(/ +$/, ''), - span: row.span - }); - }); - return lines; - } - // if the full 'source' can render in - // the target line, do so. - renderInline(source, previousLine) { - const match = source.match(/^ */); - const leadingWhitespace = match ? 
match[0].length : 0; - const target = previousLine.text; - const targetTextWidth = mixin.stringWidth(target.trimRight()); - if (!previousLine.span) { - return source; - } - // if we're not applying wrapping logic, - // just always append to the span. - if (!this.wrap) { - previousLine.hidden = true; - return target + source; - } - if (leadingWhitespace < targetTextWidth) { - return source; - } - previousLine.hidden = true; - return target.trimRight() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimLeft(); - } - rasterize(row) { - const rrows = []; - const widths = this.columnWidths(row); - let wrapped; - // word wrap all columns, and create - // a data-structure that is easy to rasterize. - row.forEach((col, c) => { - // leave room for left and right padding. - col.width = widths[c]; - if (this.wrap) { - wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n'); - } - else { - wrapped = col.text.split('\n'); - } - if (col.border) { - wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.'); - wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'"); - } - // add top and bottom padding. - if (col.padding) { - wrapped.unshift(...new Array(col.padding[top] || 0).fill('')); - wrapped.push(...new Array(col.padding[bottom] || 0).fill('')); - } - wrapped.forEach((str, r) => { - if (!rrows[r]) { - rrows.push([]); - } - const rrow = rrows[r]; - for (let i = 0; i < c; i++) { - if (rrow[i] === undefined) { - rrow.push(''); - } - } - rrow.push(str); - }); - }); - return rrows; - } - negatePadding(col) { - let wrapWidth = col.width || 0; - if (col.padding) { - wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0); - } - if (col.border) { - wrapWidth -= 4; - } - return wrapWidth; - } - columnWidths(row) { - if (!this.wrap) { - return row.map(col => { - return col.width || mixin.stringWidth(col.text); - }); - } - let unset = row.length; - let remainingWidth = this.width; - // column widths can be set in config. - const widths = row.map(col => { - if (col.width) { - unset--; - remainingWidth -= col.width; - return col.width; - } - return undefined; - }); - // any unset widths should be calculated. - const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0; - return widths.map((w, i) => { - if (w === undefined) { - return Math.max(unsetWidth, _minWidth(row[i])); - } - return w; - }); - } -} -function addBorder(col, ts, style) { - if (col.border) { - if (/[.']-+[.']/.test(ts)) { - return ''; - } - if (ts.trim().length !== 0) { - return style; - } - return ' '; - } - return ''; -} -// calculates the minimum width of -// a column, based on padding preferences. 
-function _minWidth(col) { - const padding = col.padding || []; - const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0); - if (col.border) { - return minWidth + 4; - } - return minWidth; -} -function getWindowWidth() { - /* istanbul ignore next: depends on terminal */ - if (typeof process === 'object' && process.stdout && process.stdout.columns) { - return process.stdout.columns; - } - return 80; -} -function alignRight(str, width) { - str = str.trim(); - const strWidth = mixin.stringWidth(str); - if (strWidth < width) { - return ' '.repeat(width - strWidth) + str; - } - return str; -} -function alignCenter(str, width) { - str = str.trim(); - const strWidth = mixin.stringWidth(str); - /* istanbul ignore next */ - if (strWidth >= width) { - return str; - } - return ' '.repeat((width - strWidth) >> 1) + str; -} -let mixin; -function cliui(opts, _mixin) { - mixin = _mixin; - return new UI({ - width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(), - wrap: opts === null || opts === void 0 ? void 0 : opts.wrap - }); -} - -// Bootstrap cliui with CommonJS dependencies: -const stringWidth = require('string-width'); -const stripAnsi = require('strip-ansi'); -const wrap = require('wrap-ansi'); -function ui(opts) { - return cliui(opts, { - stringWidth, - stripAnsi, - wrap - }); -} - -module.exports = ui; diff --git a/node_modules/cliui/build/index.d.cts b/node_modules/cliui/build/index.d.cts deleted file mode 100644 index 4567f94..0000000 --- a/node_modules/cliui/build/index.d.cts +++ /dev/null @@ -1,43 +0,0 @@ -interface UIOptions { - width: number; - wrap?: boolean; - rows?: string[]; -} -interface Column { - text: string; - width?: number; - align?: "right" | "left" | "center"; - padding: number[]; - border?: boolean; -} -interface ColumnArray extends Array<Column> { - span: boolean; -} -interface Line { - hidden?: boolean; - text: string; - span?: boolean; -} -declare class UI { - width: number; - wrap: boolean; - rows: ColumnArray[]; - constructor(opts: UIOptions); - span(...args: ColumnArray): void; - resetOutput(): void; - div(...args: (Column | string)[]): ColumnArray; - private shouldApplyLayoutDSL; - private applyLayoutDSL; - private colFromString; - private measurePadding; - toString(): string; - rowToString(row: ColumnArray, lines: Line[]): Line[]; - // if the full 'source' can render in - // the target line, do so. - private renderInline; - private rasterize; - private negatePadding; - private columnWidths; -} -declare function ui(opts: UIOptions): UI; -export { ui as default }; diff --git a/node_modules/cliui/build/lib/index.js b/node_modules/cliui/build/lib/index.js deleted file mode 100644 index b6eb054..0000000 --- a/node_modules/cliui/build/lib/index.js +++ /dev/null @@ -1,287 +0,0 @@ -'use strict'; -const align = { - right: alignRight, - center: alignCenter -}; -const top = 0; -const right = 1; -const bottom = 2; -const left = 3; -export class UI { - constructor(opts) { - var _a; - this.width = opts.width; - this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ?
_a : true; - this.rows = []; - } - span(...args) { - const cols = this.div(...args); - cols.span = true; - } - resetOutput() { - this.rows = []; - } - div(...args) { - if (args.length === 0) { - this.div(''); - } - if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') { - return this.applyLayoutDSL(args[0]); - } - const cols = args.map(arg => { - if (typeof arg === 'string') { - return this.colFromString(arg); - } - return arg; - }); - this.rows.push(cols); - return cols; - } - shouldApplyLayoutDSL(...args) { - return args.length === 1 && typeof args[0] === 'string' && - /[\t\n]/.test(args[0]); - } - applyLayoutDSL(str) { - const rows = str.split('\n').map(row => row.split('\t')); - let leftColumnWidth = 0; - // simple heuristic for layout, make sure the - // second column lines up along the left-hand. - // don't allow the first column to take up more - // than 50% of the screen. - rows.forEach(columns => { - if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) { - leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0])); - } - }); - // generate a table: - // replacing ' ' with padding calculations. - // using the algorithmically generated width. - rows.forEach(columns => { - this.div(...columns.map((r, i) => { - return { - text: r.trim(), - padding: this.measurePadding(r), - width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined - }; - })); - }); - return this.rows[this.rows.length - 1]; - } - colFromString(text) { - return { - text, - padding: this.measurePadding(text) - }; - } - measurePadding(str) { - // measure padding without ansi escape codes - const noAnsi = mixin.stripAnsi(str); - return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length]; - } - toString() { - const lines = []; - this.rows.forEach(row => { - this.rowToString(row, lines); - }); - // don't display any lines with the - // hidden flag set. - return lines - .filter(line => !line.hidden) - .map(line => line.text) - .join('\n'); - } - rowToString(row, lines) { - this.rasterize(row).forEach((rrow, r) => { - let str = ''; - rrow.forEach((col, c) => { - const { width } = row[c]; // the width with padding. - const wrapWidth = this.negatePadding(row[c]); // the width without padding. - let ts = col; // temporary string used during alignment/padding. - if (wrapWidth > mixin.stringWidth(col)) { - ts += ' '.repeat(wrapWidth - mixin.stringWidth(col)); - } - // align the string within its column. - if (row[c].align && row[c].align !== 'left' && this.wrap) { - const fn = align[row[c].align]; - ts = fn(ts, wrapWidth); - if (mixin.stringWidth(ts) < wrapWidth) { - ts += ' '.repeat((width || 0) - mixin.stringWidth(ts) - 1); - } - } - // apply border and padding to string. - const padding = row[c].padding || [0, 0, 0, 0]; - if (padding[left]) { - str += ' '.repeat(padding[left]); - } - str += addBorder(row[c], ts, '| '); - str += ts; - str += addBorder(row[c], ts, ' |'); - if (padding[right]) { - str += ' '.repeat(padding[right]); - } - // if prior row is span, try to render the - // current row on the prior line. - if (r === 0 && lines.length > 0) { - str = this.renderInline(str, lines[lines.length - 1]); - } - }); - // remove trailing whitespace. - lines.push({ - text: str.replace(/ +$/, ''), - span: row.span - }); - }); - return lines; - } - // if the full 'source' can render in - // the target line, do so. - renderInline(source, previousLine) { - const match = source.match(/^ */); - const leadingWhitespace = match ? 
match[0].length : 0; - const target = previousLine.text; - const targetTextWidth = mixin.stringWidth(target.trimRight()); - if (!previousLine.span) { - return source; - } - // if we're not applying wrapping logic, - // just always append to the span. - if (!this.wrap) { - previousLine.hidden = true; - return target + source; - } - if (leadingWhitespace < targetTextWidth) { - return source; - } - previousLine.hidden = true; - return target.trimRight() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimLeft(); - } - rasterize(row) { - const rrows = []; - const widths = this.columnWidths(row); - let wrapped; - // word wrap all columns, and create - // a data-structure that is easy to rasterize. - row.forEach((col, c) => { - // leave room for left and right padding. - col.width = widths[c]; - if (this.wrap) { - wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n'); - } - else { - wrapped = col.text.split('\n'); - } - if (col.border) { - wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.'); - wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'"); - } - // add top and bottom padding. - if (col.padding) { - wrapped.unshift(...new Array(col.padding[top] || 0).fill('')); - wrapped.push(...new Array(col.padding[bottom] || 0).fill('')); - } - wrapped.forEach((str, r) => { - if (!rrows[r]) { - rrows.push([]); - } - const rrow = rrows[r]; - for (let i = 0; i < c; i++) { - if (rrow[i] === undefined) { - rrow.push(''); - } - } - rrow.push(str); - }); - }); - return rrows; - } - negatePadding(col) { - let wrapWidth = col.width || 0; - if (col.padding) { - wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0); - } - if (col.border) { - wrapWidth -= 4; - } - return wrapWidth; - } - columnWidths(row) { - if (!this.wrap) { - return row.map(col => { - return col.width || mixin.stringWidth(col.text); - }); - } - let unset = row.length; - let remainingWidth = this.width; - // column widths can be set in config. - const widths = row.map(col => { - if (col.width) { - unset--; - remainingWidth -= col.width; - return col.width; - } - return undefined; - }); - // any unset widths should be calculated. - const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0; - return widths.map((w, i) => { - if (w === undefined) { - return Math.max(unsetWidth, _minWidth(row[i])); - } - return w; - }); - } -} -function addBorder(col, ts, style) { - if (col.border) { - if (/[.']-+[.']/.test(ts)) { - return ''; - } - if (ts.trim().length !== 0) { - return style; - } - return ' '; - } - return ''; -} -// calculates the minimum width of -// a column, based on padding preferences. 
-function _minWidth(col) { - const padding = col.padding || []; - const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0); - if (col.border) { - return minWidth + 4; - } - return minWidth; -} -function getWindowWidth() { - /* istanbul ignore next: depends on terminal */ - if (typeof process === 'object' && process.stdout && process.stdout.columns) { - return process.stdout.columns; - } - return 80; -} -function alignRight(str, width) { - str = str.trim(); - const strWidth = mixin.stringWidth(str); - if (strWidth < width) { - return ' '.repeat(width - strWidth) + str; - } - return str; -} -function alignCenter(str, width) { - str = str.trim(); - const strWidth = mixin.stringWidth(str); - /* istanbul ignore next */ - if (strWidth >= width) { - return str; - } - return ' '.repeat((width - strWidth) >> 1) + str; -} -let mixin; -export function cliui(opts, _mixin) { - mixin = _mixin; - return new UI({ - width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(), - wrap: opts === null || opts === void 0 ? void 0 : opts.wrap - }); -} diff --git a/node_modules/cliui/build/lib/string-utils.js b/node_modules/cliui/build/lib/string-utils.js deleted file mode 100644 index 4b87453..0000000 --- a/node_modules/cliui/build/lib/string-utils.js +++ /dev/null @@ -1,27 +0,0 @@ -// Minimal replacement for ansi string helpers "wrap-ansi" and "strip-ansi". -// to facilitate ESM and Deno modules. -// TODO: look at porting https://www.npmjs.com/package/wrap-ansi to ESM. -// The npm application -// Copyright (c) npm, Inc. and Contributors -// Licensed on the terms of The Artistic License 2.0 -// See: https://github.com/npm/cli/blob/4c65cd952bc8627811735bea76b9b110cc4fc80e/lib/utils/ansi-trim.js -const ansi = new RegExp('\x1b(?:\\[(?:\\d+[ABCDEFGJKSTm]|\\d+;\\d+[Hfm]|' + - '\\d+;\\d+;\\d+m|6n|s|u|\\?25[lh])|\\w)', 'g'); -export function stripAnsi(str) { - return str.replace(ansi, ''); -} -export function wrap(str, width) { - const [start, end] = str.match(ansi) || ['', '']; - str = stripAnsi(str); - let wrapped = ''; - for (let i = 0; i < str.length; i++) { - if (i !== 0 && (i % width) === 0) { - wrapped += '\n'; - } - wrapped += str.charAt(i); - } - if (start && end) { - wrapped = `${start}${wrapped}${end}`; - } - return wrapped; -} diff --git a/node_modules/cliui/index.mjs b/node_modules/cliui/index.mjs deleted file mode 100644 index bc7a022..0000000 --- a/node_modules/cliui/index.mjs +++ /dev/null @@ -1,13 +0,0 @@ -// Bootstrap cliui with CommonJS dependencies: -import { cliui } from './build/lib/index.js' -import { wrap, stripAnsi } from './build/lib/string-utils.js' - -export default function ui (opts) { - return cliui(opts, { - stringWidth: (str) => { - return [...str].length - }, - stripAnsi, - wrap - }) -} diff --git a/node_modules/cliui/package.json b/node_modules/cliui/package.json deleted file mode 100644 index eab6bf4..0000000 --- a/node_modules/cliui/package.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "name": "cliui", - "version": "8.0.1", - "description": "easily create complex multi-column command-line-interfaces", - "main": "build/index.cjs", - "exports": { - ".": [ - { - "import": "./index.mjs", - "require": "./build/index.cjs" - }, - "./build/index.cjs" - ] - }, - "type": "module", - "module": "./index.mjs", - "scripts": { - "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'", - "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'", - "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env 
NODE_ENV=test npm run build:cjs", - "test": "c8 mocha ./test/*.cjs", - "test:esm": "c8 mocha ./test/esm/cliui-test.mjs", - "postest": "check", - "coverage": "c8 report --check-coverage", - "precompile": "rimraf build", - "compile": "tsc", - "postcompile": "npm run build:cjs", - "build:cjs": "rollup -c", - "prepare": "npm run compile" - }, - "repository": "yargs/cliui", - "standard": { - "ignore": [ - "**/example/**" - ], - "globals": [ - "it" - ] - }, - "keywords": [ - "cli", - "command-line", - "layout", - "design", - "console", - "wrap", - "table" - ], - "author": "Ben Coe ", - "license": "ISC", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "devDependencies": { - "@types/node": "^14.0.27", - "@typescript-eslint/eslint-plugin": "^4.0.0", - "@typescript-eslint/parser": "^4.0.0", - "c8": "^7.3.0", - "chai": "^4.2.0", - "chalk": "^4.1.0", - "cross-env": "^7.0.2", - "eslint": "^7.6.0", - "eslint-plugin-import": "^2.22.0", - "eslint-plugin-node": "^11.1.0", - "gts": "^3.0.0", - "mocha": "^10.0.0", - "rimraf": "^3.0.2", - "rollup": "^2.23.1", - "rollup-plugin-ts": "^3.0.2", - "standardx": "^7.0.0", - "typescript": "^4.0.0" - }, - "files": [ - "build", - "index.mjs", - "!*.d.ts" - ], - "engines": { - "node": ">=12" - } -} diff --git a/node_modules/color-convert/CHANGELOG.md b/node_modules/color-convert/CHANGELOG.md deleted file mode 100644 index 0a7bce4..0000000 --- a/node_modules/color-convert/CHANGELOG.md +++ /dev/null @@ -1,54 +0,0 @@ -# 1.0.0 - 2016-01-07 - -- Removed: unused speed test -- Added: Automatic routing between previously unsupported conversions -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Removed: `xxx2xxx()` and `xxx2xxxRaw()` functions -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Removed: `convert()` class -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Changed: all functions to lookup dictionary -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Changed: `ansi` to `ansi256` -([#27](https://github.com/Qix-/color-convert/pull/27)) -- Fixed: argument grouping for functions requiring only one argument -([#27](https://github.com/Qix-/color-convert/pull/27)) - -# 0.6.0 - 2015-07-23 - -- Added: methods to handle -[ANSI](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors) 16/256 colors: - - rgb2ansi16 - - rgb2ansi - - hsl2ansi16 - - hsl2ansi - - hsv2ansi16 - - hsv2ansi - - hwb2ansi16 - - hwb2ansi - - cmyk2ansi16 - - cmyk2ansi - - keyword2ansi16 - - keyword2ansi - - ansi162rgb - - ansi162hsl - - ansi162hsv - - ansi162hwb - - ansi162cmyk - - ansi162keyword - - ansi2rgb - - ansi2hsl - - ansi2hsv - - ansi2hwb - - ansi2cmyk - - ansi2keyword -([#18](https://github.com/harthur/color-convert/pull/18)) - -# 0.5.3 - 2015-06-02 - -- Fixed: hsl2hsv does not return `NaN` anymore when using `[0,0,0]` -([#15](https://github.com/harthur/color-convert/issues/15)) - ---- - -Check out commit logs for older releases diff --git a/node_modules/color-convert/LICENSE b/node_modules/color-convert/LICENSE deleted file mode 100644 index 5b4c386..0000000 --- a/node_modules/color-convert/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2011-2016 Heather Arthur - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to 
-permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/node_modules/color-convert/README.md b/node_modules/color-convert/README.md deleted file mode 100644 index d4b08fc..0000000 --- a/node_modules/color-convert/README.md +++ /dev/null @@ -1,68 +0,0 @@ -# color-convert - -[![Build Status](https://travis-ci.org/Qix-/color-convert.svg?branch=master)](https://travis-ci.org/Qix-/color-convert) - -Color-convert is a color conversion library for JavaScript and node. -It converts all ways between `rgb`, `hsl`, `hsv`, `hwb`, `cmyk`, `ansi`, `ansi16`, `hex` strings, and CSS `keyword`s (will round to closest): - -```js -var convert = require('color-convert'); - -convert.rgb.hsl(140, 200, 100); // [96, 48, 59] -convert.keyword.rgb('blue'); // [0, 0, 255] - -var rgbChannels = convert.rgb.channels; // 3 -var cmykChannels = convert.cmyk.channels; // 4 -var ansiChannels = convert.ansi16.channels; // 1 -``` - -# Install - -```console -$ npm install color-convert -``` - -# API - -Simply get the property of the _from_ and _to_ conversion that you're looking for. - -All functions have a rounded and unrounded variant. By default, return values are rounded. To get the unrounded (raw) results, simply tack on `.raw` to the function. - -All 'from' functions have a hidden property called `.channels` that indicates the number of channels the function expects (not including alpha). - -```js -var convert = require('color-convert'); - -// Hex to LAB -convert.hex.lab('DEADBF'); // [ 76, 21, -2 ] -convert.hex.lab.raw('DEADBF'); // [ 75.56213190997677, 20.653827952644754, -2.290532499330533 ] - -// RGB to CMYK -convert.rgb.cmyk(167, 255, 4); // [ 35, 0, 98, 0 ] -convert.rgb.cmyk.raw(167, 255, 4); // [ 34.509803921568626, 0, 98.43137254901961, 0 ] -``` - -### Arrays -All functions that accept multiple arguments also support passing an array. - -Note that this does **not** apply to functions that convert from a color that only requires one value (e.g. `keyword`, `ansi256`, `hex`, etc.) - -```js -var convert = require('color-convert'); - -convert.rgb.hex(123, 45, 67); // '7B2D43' -convert.rgb.hex([123, 45, 67]); // '7B2D43' -``` - -## Routing - -Conversions that don't have an _explicitly_ defined conversion (in [conversions.js](conversions.js)), but can be converted by means of sub-conversions (e.g. XYZ -> **RGB** -> CMYK), are automatically routed together. This allows just about any color model supported by `color-convert` to be converted to any other model, so long as a sub-conversion path exists. This is also true for conversions requiring more than one step in between (e.g. LCH -> **LAB** -> **XYZ** -> **RGB** -> Hex). - -Keep in mind that extensive conversions _may_ result in a loss of precision, and exist only to be complete. For a list of "direct" (single-step) conversions, see [conversions.js](conversions.js). 
- -# Contribute - -If there is a new model you would like to support, or want to add a direct conversion between two existing models, please send us a pull request. - -# License -Copyright © 2011-2016, Heather Arthur and Josh Junon. Licensed under the [MIT License](LICENSE). diff --git a/node_modules/color-convert/conversions.js b/node_modules/color-convert/conversions.js deleted file mode 100644 index 2657f26..0000000 --- a/node_modules/color-convert/conversions.js +++ /dev/null @@ -1,839 +0,0 @@ -/* MIT license */ -/* eslint-disable no-mixed-operators */ -const cssKeywords = require('color-name'); - -// NOTE: conversions should only return primitive values (i.e. arrays, or -// values that give correct `typeof` results). -// do not use box values types (i.e. Number(), String(), etc.) - -const reverseKeywords = {}; -for (const key of Object.keys(cssKeywords)) { - reverseKeywords[cssKeywords[key]] = key; -} - -const convert = { - rgb: {channels: 3, labels: 'rgb'}, - hsl: {channels: 3, labels: 'hsl'}, - hsv: {channels: 3, labels: 'hsv'}, - hwb: {channels: 3, labels: 'hwb'}, - cmyk: {channels: 4, labels: 'cmyk'}, - xyz: {channels: 3, labels: 'xyz'}, - lab: {channels: 3, labels: 'lab'}, - lch: {channels: 3, labels: 'lch'}, - hex: {channels: 1, labels: ['hex']}, - keyword: {channels: 1, labels: ['keyword']}, - ansi16: {channels: 1, labels: ['ansi16']}, - ansi256: {channels: 1, labels: ['ansi256']}, - hcg: {channels: 3, labels: ['h', 'c', 'g']}, - apple: {channels: 3, labels: ['r16', 'g16', 'b16']}, - gray: {channels: 1, labels: ['gray']} -}; - -module.exports = convert; - -// Hide .channels and .labels properties -for (const model of Object.keys(convert)) { - if (!('channels' in convert[model])) { - throw new Error('missing channels property: ' + model); - } - - if (!('labels' in convert[model])) { - throw new Error('missing channel labels property: ' + model); - } - - if (convert[model].labels.length !== convert[model].channels) { - throw new Error('channel and label counts mismatch: ' + model); - } - - const {channels, labels} = convert[model]; - delete convert[model].channels; - delete convert[model].labels; - Object.defineProperty(convert[model], 'channels', {value: channels}); - Object.defineProperty(convert[model], 'labels', {value: labels}); -} - -convert.rgb.hsl = function (rgb) { - const r = rgb[0] / 255; - const g = rgb[1] / 255; - const b = rgb[2] / 255; - const min = Math.min(r, g, b); - const max = Math.max(r, g, b); - const delta = max - min; - let h; - let s; - - if (max === min) { - h = 0; - } else if (r === max) { - h = (g - b) / delta; - } else if (g === max) { - h = 2 + (b - r) / delta; - } else if (b === max) { - h = 4 + (r - g) / delta; - } - - h = Math.min(h * 60, 360); - - if (h < 0) { - h += 360; - } - - const l = (min + max) / 2; - - if (max === min) { - s = 0; - } else if (l <= 0.5) { - s = delta / (max + min); - } else { - s = delta / (2 - max - min); - } - - return [h, s * 100, l * 100]; -}; - -convert.rgb.hsv = function (rgb) { - let rdif; - let gdif; - let bdif; - let h; - let s; - - const r = rgb[0] / 255; - const g = rgb[1] / 255; - const b = rgb[2] / 255; - const v = Math.max(r, g, b); - const diff = v - Math.min(r, g, b); - const diffc = function (c) { - return (v - c) / 6 / diff + 1 / 2; - }; - - if (diff === 0) { - h = 0; - s = 0; - } else { - s = diff / v; - rdif = diffc(r); - gdif = diffc(g); - bdif = diffc(b); - - if (r === v) { - h = bdif - gdif; - } else if (g === v) { - h = (1 / 3) + rdif - bdif; - } else if (b === v) { - h = (2 / 3) + gdif - rdif; - } 
- - if (h < 0) { - h += 1; - } else if (h > 1) { - h -= 1; - } - } - - return [ - h * 360, - s * 100, - v * 100 - ]; -}; - -convert.rgb.hwb = function (rgb) { - const r = rgb[0]; - const g = rgb[1]; - let b = rgb[2]; - const h = convert.rgb.hsl(rgb)[0]; - const w = 1 / 255 * Math.min(r, Math.min(g, b)); - - b = 1 - 1 / 255 * Math.max(r, Math.max(g, b)); - - return [h, w * 100, b * 100]; -}; - -convert.rgb.cmyk = function (rgb) { - const r = rgb[0] / 255; - const g = rgb[1] / 255; - const b = rgb[2] / 255; - - const k = Math.min(1 - r, 1 - g, 1 - b); - const c = (1 - r - k) / (1 - k) || 0; - const m = (1 - g - k) / (1 - k) || 0; - const y = (1 - b - k) / (1 - k) || 0; - - return [c * 100, m * 100, y * 100, k * 100]; -}; - -function comparativeDistance(x, y) { - /* - See https://en.m.wikipedia.org/wiki/Euclidean_distance#Squared_Euclidean_distance - */ - return ( - ((x[0] - y[0]) ** 2) + - ((x[1] - y[1]) ** 2) + - ((x[2] - y[2]) ** 2) - ); -} - -convert.rgb.keyword = function (rgb) { - const reversed = reverseKeywords[rgb]; - if (reversed) { - return reversed; - } - - let currentClosestDistance = Infinity; - let currentClosestKeyword; - - for (const keyword of Object.keys(cssKeywords)) { - const value = cssKeywords[keyword]; - - // Compute comparative distance - const distance = comparativeDistance(rgb, value); - - // Check if its less, if so set as closest - if (distance < currentClosestDistance) { - currentClosestDistance = distance; - currentClosestKeyword = keyword; - } - } - - return currentClosestKeyword; -}; - -convert.keyword.rgb = function (keyword) { - return cssKeywords[keyword]; -}; - -convert.rgb.xyz = function (rgb) { - let r = rgb[0] / 255; - let g = rgb[1] / 255; - let b = rgb[2] / 255; - - // Assume sRGB - r = r > 0.04045 ? (((r + 0.055) / 1.055) ** 2.4) : (r / 12.92); - g = g > 0.04045 ? (((g + 0.055) / 1.055) ** 2.4) : (g / 12.92); - b = b > 0.04045 ? (((b + 0.055) / 1.055) ** 2.4) : (b / 12.92); - - const x = (r * 0.4124) + (g * 0.3576) + (b * 0.1805); - const y = (r * 0.2126) + (g * 0.7152) + (b * 0.0722); - const z = (r * 0.0193) + (g * 0.1192) + (b * 0.9505); - - return [x * 100, y * 100, z * 100]; -}; - -convert.rgb.lab = function (rgb) { - const xyz = convert.rgb.xyz(rgb); - let x = xyz[0]; - let y = xyz[1]; - let z = xyz[2]; - - x /= 95.047; - y /= 100; - z /= 108.883; - - x = x > 0.008856 ? (x ** (1 / 3)) : (7.787 * x) + (16 / 116); - y = y > 0.008856 ? (y ** (1 / 3)) : (7.787 * y) + (16 / 116); - z = z > 0.008856 ? (z ** (1 / 3)) : (7.787 * z) + (16 / 116); - - const l = (116 * y) - 16; - const a = 500 * (x - y); - const b = 200 * (y - z); - - return [l, a, b]; -}; - -convert.hsl.rgb = function (hsl) { - const h = hsl[0] / 360; - const s = hsl[1] / 100; - const l = hsl[2] / 100; - let t2; - let t3; - let val; - - if (s === 0) { - val = l * 255; - return [val, val, val]; - } - - if (l < 0.5) { - t2 = l * (1 + s); - } else { - t2 = l + s - l * s; - } - - const t1 = 2 * l - t2; - - const rgb = [0, 0, 0]; - for (let i = 0; i < 3; i++) { - t3 = h + 1 / 3 * -(i - 1); - if (t3 < 0) { - t3++; - } - - if (t3 > 1) { - t3--; - } - - if (6 * t3 < 1) { - val = t1 + (t2 - t1) * 6 * t3; - } else if (2 * t3 < 1) { - val = t2; - } else if (3 * t3 < 2) { - val = t1 + (t2 - t1) * (2 / 3 - t3) * 6; - } else { - val = t1; - } - - rgb[i] = val * 255; - } - - return rgb; -}; - -convert.hsl.hsv = function (hsl) { - const h = hsl[0]; - let s = hsl[1] / 100; - let l = hsl[2] / 100; - let smin = s; - const lmin = Math.max(l, 0.01); - - l *= 2; - s *= (l <= 1) ? 
l : 2 - l; - smin *= lmin <= 1 ? lmin : 2 - lmin; - const v = (l + s) / 2; - const sv = l === 0 ? (2 * smin) / (lmin + smin) : (2 * s) / (l + s); - - return [h, sv * 100, v * 100]; -}; - -convert.hsv.rgb = function (hsv) { - const h = hsv[0] / 60; - const s = hsv[1] / 100; - let v = hsv[2] / 100; - const hi = Math.floor(h) % 6; - - const f = h - Math.floor(h); - const p = 255 * v * (1 - s); - const q = 255 * v * (1 - (s * f)); - const t = 255 * v * (1 - (s * (1 - f))); - v *= 255; - - switch (hi) { - case 0: - return [v, t, p]; - case 1: - return [q, v, p]; - case 2: - return [p, v, t]; - case 3: - return [p, q, v]; - case 4: - return [t, p, v]; - case 5: - return [v, p, q]; - } -}; - -convert.hsv.hsl = function (hsv) { - const h = hsv[0]; - const s = hsv[1] / 100; - const v = hsv[2] / 100; - const vmin = Math.max(v, 0.01); - let sl; - let l; - - l = (2 - s) * v; - const lmin = (2 - s) * vmin; - sl = s * vmin; - sl /= (lmin <= 1) ? lmin : 2 - lmin; - sl = sl || 0; - l /= 2; - - return [h, sl * 100, l * 100]; -}; - -// http://dev.w3.org/csswg/css-color/#hwb-to-rgb -convert.hwb.rgb = function (hwb) { - const h = hwb[0] / 360; - let wh = hwb[1] / 100; - let bl = hwb[2] / 100; - const ratio = wh + bl; - let f; - - // Wh + bl cant be > 1 - if (ratio > 1) { - wh /= ratio; - bl /= ratio; - } - - const i = Math.floor(6 * h); - const v = 1 - bl; - f = 6 * h - i; - - if ((i & 0x01) !== 0) { - f = 1 - f; - } - - const n = wh + f * (v - wh); // Linear interpolation - - let r; - let g; - let b; - /* eslint-disable max-statements-per-line,no-multi-spaces */ - switch (i) { - default: - case 6: - case 0: r = v; g = n; b = wh; break; - case 1: r = n; g = v; b = wh; break; - case 2: r = wh; g = v; b = n; break; - case 3: r = wh; g = n; b = v; break; - case 4: r = n; g = wh; b = v; break; - case 5: r = v; g = wh; b = n; break; - } - /* eslint-enable max-statements-per-line,no-multi-spaces */ - - return [r * 255, g * 255, b * 255]; -}; - -convert.cmyk.rgb = function (cmyk) { - const c = cmyk[0] / 100; - const m = cmyk[1] / 100; - const y = cmyk[2] / 100; - const k = cmyk[3] / 100; - - const r = 1 - Math.min(1, c * (1 - k) + k); - const g = 1 - Math.min(1, m * (1 - k) + k); - const b = 1 - Math.min(1, y * (1 - k) + k); - - return [r * 255, g * 255, b * 255]; -}; - -convert.xyz.rgb = function (xyz) { - const x = xyz[0] / 100; - const y = xyz[1] / 100; - const z = xyz[2] / 100; - let r; - let g; - let b; - - r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986); - g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415); - b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570); - - // Assume sRGB - r = r > 0.0031308 - ? ((1.055 * (r ** (1.0 / 2.4))) - 0.055) - : r * 12.92; - - g = g > 0.0031308 - ? ((1.055 * (g ** (1.0 / 2.4))) - 0.055) - : g * 12.92; - - b = b > 0.0031308 - ? ((1.055 * (b ** (1.0 / 2.4))) - 0.055) - : b * 12.92; - - r = Math.min(Math.max(0, r), 1); - g = Math.min(Math.max(0, g), 1); - b = Math.min(Math.max(0, b), 1); - - return [r * 255, g * 255, b * 255]; -}; - -convert.xyz.lab = function (xyz) { - let x = xyz[0]; - let y = xyz[1]; - let z = xyz[2]; - - x /= 95.047; - y /= 100; - z /= 108.883; - - x = x > 0.008856 ? (x ** (1 / 3)) : (7.787 * x) + (16 / 116); - y = y > 0.008856 ? (y ** (1 / 3)) : (7.787 * y) + (16 / 116); - z = z > 0.008856 ? 
(z ** (1 / 3)) : (7.787 * z) + (16 / 116); - - const l = (116 * y) - 16; - const a = 500 * (x - y); - const b = 200 * (y - z); - - return [l, a, b]; -}; - -convert.lab.xyz = function (lab) { - const l = lab[0]; - const a = lab[1]; - const b = lab[2]; - let x; - let y; - let z; - - y = (l + 16) / 116; - x = a / 500 + y; - z = y - b / 200; - - const y2 = y ** 3; - const x2 = x ** 3; - const z2 = z ** 3; - y = y2 > 0.008856 ? y2 : (y - 16 / 116) / 7.787; - x = x2 > 0.008856 ? x2 : (x - 16 / 116) / 7.787; - z = z2 > 0.008856 ? z2 : (z - 16 / 116) / 7.787; - - x *= 95.047; - y *= 100; - z *= 108.883; - - return [x, y, z]; -}; - -convert.lab.lch = function (lab) { - const l = lab[0]; - const a = lab[1]; - const b = lab[2]; - let h; - - const hr = Math.atan2(b, a); - h = hr * 360 / 2 / Math.PI; - - if (h < 0) { - h += 360; - } - - const c = Math.sqrt(a * a + b * b); - - return [l, c, h]; -}; - -convert.lch.lab = function (lch) { - const l = lch[0]; - const c = lch[1]; - const h = lch[2]; - - const hr = h / 360 * 2 * Math.PI; - const a = c * Math.cos(hr); - const b = c * Math.sin(hr); - - return [l, a, b]; -}; - -convert.rgb.ansi16 = function (args, saturation = null) { - const [r, g, b] = args; - let value = saturation === null ? convert.rgb.hsv(args)[2] : saturation; // Hsv -> ansi16 optimization - - value = Math.round(value / 50); - - if (value === 0) { - return 30; - } - - let ansi = 30 - + ((Math.round(b / 255) << 2) - | (Math.round(g / 255) << 1) - | Math.round(r / 255)); - - if (value === 2) { - ansi += 60; - } - - return ansi; -}; - -convert.hsv.ansi16 = function (args) { - // Optimization here; we already know the value and don't need to get - // it converted for us. - return convert.rgb.ansi16(convert.hsv.rgb(args), args[2]); -}; - -convert.rgb.ansi256 = function (args) { - const r = args[0]; - const g = args[1]; - const b = args[2]; - - // We use the extended greyscale palette here, with the exception of - // black and white. normal palette only has 4 greyscale shades. 
- if (r === g && g === b) { - if (r < 8) { - return 16; - } - - if (r > 248) { - return 231; - } - - return Math.round(((r - 8) / 247) * 24) + 232; - } - - const ansi = 16 - + (36 * Math.round(r / 255 * 5)) - + (6 * Math.round(g / 255 * 5)) - + Math.round(b / 255 * 5); - - return ansi; -}; - -convert.ansi16.rgb = function (args) { - let color = args % 10; - - // Handle greyscale - if (color === 0 || color === 7) { - if (args > 50) { - color += 3.5; - } - - color = color / 10.5 * 255; - - return [color, color, color]; - } - - const mult = (~~(args > 50) + 1) * 0.5; - const r = ((color & 1) * mult) * 255; - const g = (((color >> 1) & 1) * mult) * 255; - const b = (((color >> 2) & 1) * mult) * 255; - - return [r, g, b]; -}; - -convert.ansi256.rgb = function (args) { - // Handle greyscale - if (args >= 232) { - const c = (args - 232) * 10 + 8; - return [c, c, c]; - } - - args -= 16; - - let rem; - const r = Math.floor(args / 36) / 5 * 255; - const g = Math.floor((rem = args % 36) / 6) / 5 * 255; - const b = (rem % 6) / 5 * 255; - - return [r, g, b]; -}; - -convert.rgb.hex = function (args) { - const integer = ((Math.round(args[0]) & 0xFF) << 16) - + ((Math.round(args[1]) & 0xFF) << 8) - + (Math.round(args[2]) & 0xFF); - - const string = integer.toString(16).toUpperCase(); - return '000000'.substring(string.length) + string; -}; - -convert.hex.rgb = function (args) { - const match = args.toString(16).match(/[a-f0-9]{6}|[a-f0-9]{3}/i); - if (!match) { - return [0, 0, 0]; - } - - let colorString = match[0]; - - if (match[0].length === 3) { - colorString = colorString.split('').map(char => { - return char + char; - }).join(''); - } - - const integer = parseInt(colorString, 16); - const r = (integer >> 16) & 0xFF; - const g = (integer >> 8) & 0xFF; - const b = integer & 0xFF; - - return [r, g, b]; -}; - -convert.rgb.hcg = function (rgb) { - const r = rgb[0] / 255; - const g = rgb[1] / 255; - const b = rgb[2] / 255; - const max = Math.max(Math.max(r, g), b); - const min = Math.min(Math.min(r, g), b); - const chroma = (max - min); - let grayscale; - let hue; - - if (chroma < 1) { - grayscale = min / (1 - chroma); - } else { - grayscale = 0; - } - - if (chroma <= 0) { - hue = 0; - } else - if (max === r) { - hue = ((g - b) / chroma) % 6; - } else - if (max === g) { - hue = 2 + (b - r) / chroma; - } else { - hue = 4 + (r - g) / chroma; - } - - hue /= 6; - hue %= 1; - - return [hue * 360, chroma * 100, grayscale * 100]; -}; - -convert.hsl.hcg = function (hsl) { - const s = hsl[1] / 100; - const l = hsl[2] / 100; - - const c = l < 0.5 ? 
(2.0 * s * l) : (2.0 * s * (1.0 - l)); - - let f = 0; - if (c < 1.0) { - f = (l - 0.5 * c) / (1.0 - c); - } - - return [hsl[0], c * 100, f * 100]; -}; - -convert.hsv.hcg = function (hsv) { - const s = hsv[1] / 100; - const v = hsv[2] / 100; - - const c = s * v; - let f = 0; - - if (c < 1.0) { - f = (v - c) / (1 - c); - } - - return [hsv[0], c * 100, f * 100]; -}; - -convert.hcg.rgb = function (hcg) { - const h = hcg[0] / 360; - const c = hcg[1] / 100; - const g = hcg[2] / 100; - - if (c === 0.0) { - return [g * 255, g * 255, g * 255]; - } - - const pure = [0, 0, 0]; - const hi = (h % 1) * 6; - const v = hi % 1; - const w = 1 - v; - let mg = 0; - - /* eslint-disable max-statements-per-line */ - switch (Math.floor(hi)) { - case 0: - pure[0] = 1; pure[1] = v; pure[2] = 0; break; - case 1: - pure[0] = w; pure[1] = 1; pure[2] = 0; break; - case 2: - pure[0] = 0; pure[1] = 1; pure[2] = v; break; - case 3: - pure[0] = 0; pure[1] = w; pure[2] = 1; break; - case 4: - pure[0] = v; pure[1] = 0; pure[2] = 1; break; - default: - pure[0] = 1; pure[1] = 0; pure[2] = w; - } - /* eslint-enable max-statements-per-line */ - - mg = (1.0 - c) * g; - - return [ - (c * pure[0] + mg) * 255, - (c * pure[1] + mg) * 255, - (c * pure[2] + mg) * 255 - ]; -}; - -convert.hcg.hsv = function (hcg) { - const c = hcg[1] / 100; - const g = hcg[2] / 100; - - const v = c + g * (1.0 - c); - let f = 0; - - if (v > 0.0) { - f = c / v; - } - - return [hcg[0], f * 100, v * 100]; -}; - -convert.hcg.hsl = function (hcg) { - const c = hcg[1] / 100; - const g = hcg[2] / 100; - - const l = g * (1.0 - c) + 0.5 * c; - let s = 0; - - if (l > 0.0 && l < 0.5) { - s = c / (2 * l); - } else - if (l >= 0.5 && l < 1.0) { - s = c / (2 * (1 - l)); - } - - return [hcg[0], s * 100, l * 100]; -}; - -convert.hcg.hwb = function (hcg) { - const c = hcg[1] / 100; - const g = hcg[2] / 100; - const v = c + g * (1.0 - c); - return [hcg[0], (v - c) * 100, (1 - v) * 100]; -}; - -convert.hwb.hcg = function (hwb) { - const w = hwb[1] / 100; - const b = hwb[2] / 100; - const v = 1 - b; - const c = v - w; - let g = 0; - - if (c < 1) { - g = (v - c) / (1 - c); - } - - return [hwb[0], c * 100, g * 100]; -}; - -convert.apple.rgb = function (apple) { - return [(apple[0] / 65535) * 255, (apple[1] / 65535) * 255, (apple[2] / 65535) * 255]; -}; - -convert.rgb.apple = function (rgb) { - return [(rgb[0] / 255) * 65535, (rgb[1] / 255) * 65535, (rgb[2] / 255) * 65535]; -}; - -convert.gray.rgb = function (args) { - return [args[0] / 100 * 255, args[0] / 100 * 255, args[0] / 100 * 255]; -}; - -convert.gray.hsl = function (args) { - return [0, 0, args[0]]; -}; - -convert.gray.hsv = convert.gray.hsl; - -convert.gray.hwb = function (gray) { - return [0, 100, gray[0]]; -}; - -convert.gray.cmyk = function (gray) { - return [0, 0, 0, gray[0]]; -}; - -convert.gray.lab = function (gray) { - return [gray[0], 0, 0]; -}; - -convert.gray.hex = function (gray) { - const val = Math.round(gray[0] / 100 * 255) & 0xFF; - const integer = (val << 16) + (val << 8) + val; - - const string = integer.toString(16).toUpperCase(); - return '000000'.substring(string.length) + string; -}; - -convert.rgb.gray = function (rgb) { - const val = (rgb[0] + rgb[1] + rgb[2]) / 3; - return [val / 255 * 100]; -}; diff --git a/node_modules/color-convert/index.js b/node_modules/color-convert/index.js deleted file mode 100644 index b648e57..0000000 --- a/node_modules/color-convert/index.js +++ /dev/null @@ -1,81 +0,0 @@ -const conversions = require('./conversions'); -const route = require('./route'); - -const 
convert = {}; - -const models = Object.keys(conversions); - -function wrapRaw(fn) { - const wrappedFn = function (...args) { - const arg0 = args[0]; - if (arg0 === undefined || arg0 === null) { - return arg0; - } - - if (arg0.length > 1) { - args = arg0; - } - - return fn(args); - }; - - // Preserve .conversion property if there is one - if ('conversion' in fn) { - wrappedFn.conversion = fn.conversion; - } - - return wrappedFn; -} - -function wrapRounded(fn) { - const wrappedFn = function (...args) { - const arg0 = args[0]; - - if (arg0 === undefined || arg0 === null) { - return arg0; - } - - if (arg0.length > 1) { - args = arg0; - } - - const result = fn(args); - - // We're assuming the result is an array here. - // see notice in conversions.js; don't use box types - // in conversion functions. - if (typeof result === 'object') { - for (let len = result.length, i = 0; i < len; i++) { - result[i] = Math.round(result[i]); - } - } - - return result; - }; - - // Preserve .conversion property if there is one - if ('conversion' in fn) { - wrappedFn.conversion = fn.conversion; - } - - return wrappedFn; -} - -models.forEach(fromModel => { - convert[fromModel] = {}; - - Object.defineProperty(convert[fromModel], 'channels', {value: conversions[fromModel].channels}); - Object.defineProperty(convert[fromModel], 'labels', {value: conversions[fromModel].labels}); - - const routes = route(fromModel); - const routeModels = Object.keys(routes); - - routeModels.forEach(toModel => { - const fn = routes[toModel]; - - convert[fromModel][toModel] = wrapRounded(fn); - convert[fromModel][toModel].raw = wrapRaw(fn); - }); -}); - -module.exports = convert; diff --git a/node_modules/color-convert/package.json b/node_modules/color-convert/package.json deleted file mode 100644 index 6e48000..0000000 --- a/node_modules/color-convert/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "color-convert", - "description": "Plain color conversion functions", - "version": "2.0.1", - "author": "Heather Arthur ", - "license": "MIT", - "repository": "Qix-/color-convert", - "scripts": { - "pretest": "xo", - "test": "node test/basic.js" - }, - "engines": { - "node": ">=7.0.0" - }, - "keywords": [ - "color", - "colour", - "convert", - "converter", - "conversion", - "rgb", - "hsl", - "hsv", - "hwb", - "cmyk", - "ansi", - "ansi16" - ], - "files": [ - "index.js", - "conversions.js", - "route.js" - ], - "xo": { - "rules": { - "default-case": 0, - "no-inline-comments": 0, - "operator-linebreak": 0 - } - }, - "devDependencies": { - "chalk": "^2.4.2", - "xo": "^0.24.0" - }, - "dependencies": { - "color-name": "~1.1.4" - } -} diff --git a/node_modules/color-convert/route.js b/node_modules/color-convert/route.js deleted file mode 100644 index 1a08521..0000000 --- a/node_modules/color-convert/route.js +++ /dev/null @@ -1,97 +0,0 @@ -const conversions = require('./conversions'); - -/* - This function routes a model to all other models. - - all functions that are routed have a property `.conversion` attached - to the returned synthetic function. This property is an array - of strings, each with the steps in between the 'from' and 'to' - color models (inclusive). - - conversions that are not possible simply are not included. -*/ - -function buildGraph() { - const graph = {}; - // https://jsperf.com/object-keys-vs-for-in-with-closure/3 - const models = Object.keys(conversions); - - for (let len = models.length, i = 0; i < len; i++) { - graph[models[i]] = { - // http://jsperf.com/1-vs-infinity - // micro-opt, but this is simple. 
- distance: -1, - parent: null - }; - } - - return graph; -} - -// https://en.wikipedia.org/wiki/Breadth-first_search -function deriveBFS(fromModel) { - const graph = buildGraph(); - const queue = [fromModel]; // Unshift -> queue -> pop - - graph[fromModel].distance = 0; - - while (queue.length) { - const current = queue.pop(); - const adjacents = Object.keys(conversions[current]); - - for (let len = adjacents.length, i = 0; i < len; i++) { - const adjacent = adjacents[i]; - const node = graph[adjacent]; - - if (node.distance === -1) { - node.distance = graph[current].distance + 1; - node.parent = current; - queue.unshift(adjacent); - } - } - } - - return graph; -} - -function link(from, to) { - return function (args) { - return to(from(args)); - }; -} - -function wrapConversion(toModel, graph) { - const path = [graph[toModel].parent, toModel]; - let fn = conversions[graph[toModel].parent][toModel]; - - let cur = graph[toModel].parent; - while (graph[cur].parent) { - path.unshift(graph[cur].parent); - fn = link(conversions[graph[cur].parent][cur], fn); - cur = graph[cur].parent; - } - - fn.conversion = path; - return fn; -} - -module.exports = function (fromModel) { - const graph = deriveBFS(fromModel); - const conversion = {}; - - const models = Object.keys(graph); - for (let len = models.length, i = 0; i < len; i++) { - const toModel = models[i]; - const node = graph[toModel]; - - if (node.parent === null) { - // No possible conversion, or this node is the source model. - continue; - } - - conversion[toModel] = wrapConversion(toModel, graph); - } - - return conversion; -}; - diff --git a/node_modules/color-name/LICENSE b/node_modules/color-name/LICENSE deleted file mode 100644 index c6b1001..0000000 --- a/node_modules/color-name/LICENSE +++ /dev/null @@ -1,8 +0,0 @@ -The MIT License (MIT) -Copyright (c) 2015 Dmitry Ivanov - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/color-name/README.md b/node_modules/color-name/README.md deleted file mode 100644 index 932b979..0000000 --- a/node_modules/color-name/README.md +++ /dev/null @@ -1,11 +0,0 @@ -A JSON with color names and its values. Based on http://dev.w3.org/csswg/css-color/#named-colors. 
- -[![NPM](https://nodei.co/npm/color-name.png?mini=true)](https://nodei.co/npm/color-name/) - - -```js -var colors = require('color-name'); -colors.red //[255,0,0] -``` - - diff --git a/node_modules/color-name/index.js b/node_modules/color-name/index.js deleted file mode 100644 index b7c198a..0000000 --- a/node_modules/color-name/index.js +++ /dev/null @@ -1,152 +0,0 @@ -'use strict' - -module.exports = { - "aliceblue": [240, 248, 255], - "antiquewhite": [250, 235, 215], - "aqua": [0, 255, 255], - "aquamarine": [127, 255, 212], - "azure": [240, 255, 255], - "beige": [245, 245, 220], - "bisque": [255, 228, 196], - "black": [0, 0, 0], - "blanchedalmond": [255, 235, 205], - "blue": [0, 0, 255], - "blueviolet": [138, 43, 226], - "brown": [165, 42, 42], - "burlywood": [222, 184, 135], - "cadetblue": [95, 158, 160], - "chartreuse": [127, 255, 0], - "chocolate": [210, 105, 30], - "coral": [255, 127, 80], - "cornflowerblue": [100, 149, 237], - "cornsilk": [255, 248, 220], - "crimson": [220, 20, 60], - "cyan": [0, 255, 255], - "darkblue": [0, 0, 139], - "darkcyan": [0, 139, 139], - "darkgoldenrod": [184, 134, 11], - "darkgray": [169, 169, 169], - "darkgreen": [0, 100, 0], - "darkgrey": [169, 169, 169], - "darkkhaki": [189, 183, 107], - "darkmagenta": [139, 0, 139], - "darkolivegreen": [85, 107, 47], - "darkorange": [255, 140, 0], - "darkorchid": [153, 50, 204], - "darkred": [139, 0, 0], - "darksalmon": [233, 150, 122], - "darkseagreen": [143, 188, 143], - "darkslateblue": [72, 61, 139], - "darkslategray": [47, 79, 79], - "darkslategrey": [47, 79, 79], - "darkturquoise": [0, 206, 209], - "darkviolet": [148, 0, 211], - "deeppink": [255, 20, 147], - "deepskyblue": [0, 191, 255], - "dimgray": [105, 105, 105], - "dimgrey": [105, 105, 105], - "dodgerblue": [30, 144, 255], - "firebrick": [178, 34, 34], - "floralwhite": [255, 250, 240], - "forestgreen": [34, 139, 34], - "fuchsia": [255, 0, 255], - "gainsboro": [220, 220, 220], - "ghostwhite": [248, 248, 255], - "gold": [255, 215, 0], - "goldenrod": [218, 165, 32], - "gray": [128, 128, 128], - "green": [0, 128, 0], - "greenyellow": [173, 255, 47], - "grey": [128, 128, 128], - "honeydew": [240, 255, 240], - "hotpink": [255, 105, 180], - "indianred": [205, 92, 92], - "indigo": [75, 0, 130], - "ivory": [255, 255, 240], - "khaki": [240, 230, 140], - "lavender": [230, 230, 250], - "lavenderblush": [255, 240, 245], - "lawngreen": [124, 252, 0], - "lemonchiffon": [255, 250, 205], - "lightblue": [173, 216, 230], - "lightcoral": [240, 128, 128], - "lightcyan": [224, 255, 255], - "lightgoldenrodyellow": [250, 250, 210], - "lightgray": [211, 211, 211], - "lightgreen": [144, 238, 144], - "lightgrey": [211, 211, 211], - "lightpink": [255, 182, 193], - "lightsalmon": [255, 160, 122], - "lightseagreen": [32, 178, 170], - "lightskyblue": [135, 206, 250], - "lightslategray": [119, 136, 153], - "lightslategrey": [119, 136, 153], - "lightsteelblue": [176, 196, 222], - "lightyellow": [255, 255, 224], - "lime": [0, 255, 0], - "limegreen": [50, 205, 50], - "linen": [250, 240, 230], - "magenta": [255, 0, 255], - "maroon": [128, 0, 0], - "mediumaquamarine": [102, 205, 170], - "mediumblue": [0, 0, 205], - "mediumorchid": [186, 85, 211], - "mediumpurple": [147, 112, 219], - "mediumseagreen": [60, 179, 113], - "mediumslateblue": [123, 104, 238], - "mediumspringgreen": [0, 250, 154], - "mediumturquoise": [72, 209, 204], - "mediumvioletred": [199, 21, 133], - "midnightblue": [25, 25, 112], - "mintcream": [245, 255, 250], - "mistyrose": [255, 228, 225], - "moccasin": [255, 228, 181], - 
"navajowhite": [255, 222, 173], - "navy": [0, 0, 128], - "oldlace": [253, 245, 230], - "olive": [128, 128, 0], - "olivedrab": [107, 142, 35], - "orange": [255, 165, 0], - "orangered": [255, 69, 0], - "orchid": [218, 112, 214], - "palegoldenrod": [238, 232, 170], - "palegreen": [152, 251, 152], - "paleturquoise": [175, 238, 238], - "palevioletred": [219, 112, 147], - "papayawhip": [255, 239, 213], - "peachpuff": [255, 218, 185], - "peru": [205, 133, 63], - "pink": [255, 192, 203], - "plum": [221, 160, 221], - "powderblue": [176, 224, 230], - "purple": [128, 0, 128], - "rebeccapurple": [102, 51, 153], - "red": [255, 0, 0], - "rosybrown": [188, 143, 143], - "royalblue": [65, 105, 225], - "saddlebrown": [139, 69, 19], - "salmon": [250, 128, 114], - "sandybrown": [244, 164, 96], - "seagreen": [46, 139, 87], - "seashell": [255, 245, 238], - "sienna": [160, 82, 45], - "silver": [192, 192, 192], - "skyblue": [135, 206, 235], - "slateblue": [106, 90, 205], - "slategray": [112, 128, 144], - "slategrey": [112, 128, 144], - "snow": [255, 250, 250], - "springgreen": [0, 255, 127], - "steelblue": [70, 130, 180], - "tan": [210, 180, 140], - "teal": [0, 128, 128], - "thistle": [216, 191, 216], - "tomato": [255, 99, 71], - "turquoise": [64, 224, 208], - "violet": [238, 130, 238], - "wheat": [245, 222, 179], - "white": [255, 255, 255], - "whitesmoke": [245, 245, 245], - "yellow": [255, 255, 0], - "yellowgreen": [154, 205, 50] -}; diff --git a/node_modules/color-name/package.json b/node_modules/color-name/package.json deleted file mode 100644 index 782dd82..0000000 --- a/node_modules/color-name/package.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "color-name", - "version": "1.1.4", - "description": "A list of color names and its values", - "main": "index.js", - "files": [ - "index.js" - ], - "scripts": { - "test": "node test.js" - }, - "repository": { - "type": "git", - "url": "git@github.com:colorjs/color-name.git" - }, - "keywords": [ - "color-name", - "color", - "color-keyword", - "keyword" - ], - "author": "DY ", - "license": "MIT", - "bugs": { - "url": "https://github.com/colorjs/color-name/issues" - }, - "homepage": "https://github.com/colorjs/color-name" -} diff --git a/node_modules/dependency-graph/CHANGELOG.md b/node_modules/dependency-graph/CHANGELOG.md deleted file mode 100755 index 5e6c4d9..0000000 --- a/node_modules/dependency-graph/CHANGELOG.md +++ /dev/null @@ -1,82 +0,0 @@ -# Dependency Graph Changelog - -## 0.11.0 (March 5, 2021) - -- Add `entryNodes` method that returns the nodes that nothing depends on - thanks [amcdnl](https://github.com/amcdnl)! - -## 0.10.0 (January 9, 2021) - -- Add `directDependenciesOf` and `directDependantsOf` methods for retrieving direct dependency information. (Fixes #40) -- Add aliases `dependentsOf` and `directDependentsOf`. - -## 0.9.0 (February 10, 2020) - -- Rewrite the topological sort DFS to be more efficient (and work!) on large graphs. - - No longer uses recursion to avoid stack overflows with large/deep graphs - - No longer is accidentally `O(N^2)` (thanks [willtennien](https://github.com/willtennien) for pointing this out!) - -## 0.8.1 (December 3, 2019) - -- Ensure all nodes are included in overallOrder when cycles are allowed. (Fixes #33) - -## 0.8.0 (December 11, 2018) - -- Add a `DepGraphCycleError` with cyclePath property - thanks [jhugman](https://github.com/jhugman)! - -## 0.7.2 (August 30, 2018) - -- Make constructor parameter optional in Typescript definition. 
(Fixes #26) - -## 0.7.1 (June 5, 2018) - -- Fix Typescript definition to include the new constructor arguments added in `0.7.0` - thanks [tbranyen](https://github.com/tbranyen)! - -## 0.7.0 (January 17, 2018) - -- Allow circular dependencies by passing in `{circular: true}` into the constructor - thanks [tbranyen](https://github.com/tbranyen)! - -## 0.6.0 (October 22, 2017) - -- Add a `size` method that will return the number of nodes in the graph. -- Add a `clone` method that will clone the graph. Any custom node data will only be shallow-copied. (Fixes #14) - -## 0.5.2 (October 22, 2017) - -- Add missing parameter in TypeScript definition. (Fixes #19) - -## 0.5.1 (October 7, 2017) - -- Now exposes Typescript type definition - thanks [vangorra](https://github.com/vangorra)! - -## 0.5.0 (April 26, 2016) - -- Add optional data parameter for the addNode method. (Fixes #12) -- Add methods getNodeData and setNodeData to manipulate the data associated with a node name. (Fixes #12) -- Change the hasNode method to be able to cope with falsy node data. (Fixes #12) - -## 0.4.1 (Sept 3, 2015) - -- Check all nodes for potential cycles when calculating overall order. (Fixes #8) - -## 0.4.0 (Aug 1, 2015) - -- Better error messages - - When a cycle is detected, the error message will now include the cycle in it. E.g `Dependency Cycle Found: a -> b -> c -> a` (Fixes #7) - - When calling `addDependency` if one of the nodes does not exist, the error will say which one it was (instead of saying that "one" of the two nodes did not exist and making you manually determine which one) -- Calling `overallOrder` on an empty graph will no longer throw an error about a dependency cycle. It will return an empty array. - -## 0.3.0 (July 24, 2015) - -- Fix issue where if you call `addNode` twice with the same name, it would clear all edges for that node. Now it will do nothing if a node with the specified name already exists. (Fixes #3) - -## 0.2.1 (July 3, 2015) - -- Fixed removeNode leaving references in outgoingEdges and reference to non-existent var edges - thanks [juhoha](https://github.com/juhoha)! (Fixes #2) - -## 0.2.0 (May 1, 2015) - -- Removed dependency on Underscore - thanks [myndzi](https://github.com/myndzi)! (Fixes #1) - -## 0.1.0 (May 18, 2013) - -- Initial Release - extracted out of asset-smasher diff --git a/node_modules/dependency-graph/LICENSE b/node_modules/dependency-graph/LICENSE deleted file mode 100755 index 6ebe5b6..0000000 --- a/node_modules/dependency-graph/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2013-2020 by Jim Riecken - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/dependency-graph/README.md b/node_modules/dependency-graph/README.md deleted file mode 100755 index 763aaf8..0000000 --- a/node_modules/dependency-graph/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# Dependency Graph - -Simple dependency graph - -## Overview - -This is a simple dependency graph useful for determining the order to do a list of things that depend on certain items being done before they are. - -To use, `npm install dependency-graph` and then `require('dependency-graph').DepGraph` - -## API - -### DepGraph - -Nodes in the graph are just simple strings with optional data associated with them. - - - `addNode(name, data)` - add a node in the graph with optional data. If `data` is not given, `name` will be used as data - - `removeNode(name)` - remove a node from the graph - - `hasNode(name)` - check if a node exists in the graph - - `size()` - return the number of nodes in the graph - - `getNodeData(name)` - get the data associated with a node (will throw an `Error` if the node does not exist) - - `setNodeData(name, data)` - set the data for an existing node (will throw an `Error` if the node does not exist) - - `addDependency(from, to)` - add a dependency between two nodes (will throw an `Error` if one of the nodes does not exist) - - `removeDependency(from, to)` - remove a dependency between two nodes - - `clone()` - return a clone of the graph. Any data attached to the nodes will only be *shallow-copied* - - `dependenciesOf(name, leavesOnly)` - get an array containing the nodes that the specified node depends on (transitively). If `leavesOnly` is true, only nodes that do not depend on any other nodes will be returned in the array. - - `dependantsOf(name, leavesOnly)` (aliased as `dependentsOf`) - get an array containing the nodes that depend on the specified node (transitively). If `leavesOnly` is true, only nodes that do not have any dependants will be returned in the array. - - `directDependenciesOf(name)` - get an array containing the direct dependencies of the specified node - - `directDependantsOf(name)` (aliased as `directDependentsOf`) - get an array containing the nodes that directly depend on the specified node - - `overallOrder(leavesOnly)` - construct the overall processing order for the dependency graph. If `leavesOnly` is true, only nodes that do not depend on any other nodes will be returned. - - `entryNodes()` - array of nodes that have no dependants (i.e. nothing depends on them). - -Dependency Cycles are detected when running `dependenciesOf`, `dependantsOf`, and `overallOrder` and if one is found, a `DepGraphCycleError` will be thrown that includes what the cycle was in the message as well as the `cyclePath` property: e.g. `Dependency Cycle Found: a -> b -> c -> a`. If you wish to silence this error, pass `circular: true` when instantiating `DepGraph` (more below). 
- -## Examples - - var DepGraph = require('dependency-graph').DepGraph; - - var graph = new DepGraph(); - graph.addNode('a'); - graph.addNode('b'); - graph.addNode('c'); - - graph.size() // 3 - - graph.addDependency('a', 'b'); - graph.addDependency('b', 'c'); - - graph.dependenciesOf('a'); // ['c', 'b'] - graph.dependenciesOf('b'); // ['c'] - graph.dependantsOf('c'); // ['a', 'b'] - - graph.overallOrder(); // ['c', 'b', 'a'] - graph.overallOrder(true); // ['c'] - graph.entryNodes(); // ['a'] - - graph.addNode('d', 'data'); - - graph.getNodeData('d'); // 'data' - - graph.setNodeData('d', 'newData'); - - graph.getNodeData('d'); // 'newData' - - var circularGraph = new DepGraph({ circular: true }); - - circularGraph.addNode('a'); - circularGraph.addNode('b'); - circularGraph.addNode('c'); - circularGraph.addNode('d'); - - circularGraph.addDependency('a', 'b'); - circularGraph.addDependency('b', 'c'); // b depends on c - circularGraph.addDependency('c', 'a'); // c depends on a, which depends on b - circularGraph.addDependency('d', 'a'); - - circularGraph.dependenciesOf('b'); // ['a', 'c'] - circularGraph.overallOrder(); // ['c', 'b', 'a', 'd'] diff --git a/node_modules/dependency-graph/lib/dep_graph.js b/node_modules/dependency-graph/lib/dep_graph.js deleted file mode 100755 index a64f801..0000000 --- a/node_modules/dependency-graph/lib/dep_graph.js +++ /dev/null @@ -1,364 +0,0 @@ -/** - * A simple dependency graph - */ - -/** - * Helper for creating a Topological Sort using Depth-First-Search on a set of edges. - * - * Detects cycles and throws an Error if one is detected (unless the "circular" - * parameter is "true" in which case it ignores them). - * - * @param edges The set of edges to DFS through - * @param leavesOnly Whether to only return "leaf" nodes (ones who have no edges) - * @param result An array in which the results will be populated - * @param circular A boolean to allow circular dependencies - */ -function createDFS(edges, leavesOnly, result, circular) { - var visited = {}; - return function (start) { - if (visited[start]) { - return; - } - var inCurrentPath = {}; - var currentPath = []; - var todo = []; // used as a stack - todo.push({ node: start, processed: false }); - while (todo.length > 0) { - var current = todo[todo.length - 1]; // peek at the todo stack - var processed = current.processed; - var node = current.node; - if (!processed) { - // Haven't visited edges yet (visiting phase) - if (visited[node]) { - todo.pop(); - continue; - } else if (inCurrentPath[node]) { - // It's not a DAG - if (circular) { - todo.pop(); - // If we're tolerating cycles, don't revisit the node - continue; - } - currentPath.push(node); - throw new DepGraphCycleError(currentPath); - } - - inCurrentPath[node] = true; - currentPath.push(node); - var nodeEdges = edges[node]; - // (push edges onto the todo stack in reverse order to be order-compatible with the old DFS implementation) - for (var i = nodeEdges.length - 1; i >= 0; i--) { - todo.push({ node: nodeEdges[i], processed: false }); - } - current.processed = true; - } else { - // Have visited edges (stack unrolling phase) - todo.pop(); - currentPath.pop(); - inCurrentPath[node] = false; - visited[node] = true; - if (!leavesOnly || edges[node].length === 0) { - result.push(node); - } - } - } - }; -} - -/** - * Simple Dependency Graph - */ -var DepGraph = (exports.DepGraph = function DepGraph(opts) { - this.nodes = {}; // Node -> Node/Data (treated like a Set) - this.outgoingEdges = {}; // Node -> [Dependency Node] - this.incomingEdges = {}; 
// Node -> [Dependant Node] - this.circular = opts && !!opts.circular; // Allows circular deps -}); -DepGraph.prototype = { - /** - * The number of nodes in the graph. - */ - size: function () { - return Object.keys(this.nodes).length; - }, - /** - * Add a node to the dependency graph. If a node already exists, this method will do nothing. - */ - addNode: function (node, data) { - if (!this.hasNode(node)) { - // Checking the arguments length allows the user to add a node with undefined data - if (arguments.length === 2) { - this.nodes[node] = data; - } else { - this.nodes[node] = node; - } - this.outgoingEdges[node] = []; - this.incomingEdges[node] = []; - } - }, - /** - * Remove a node from the dependency graph. If a node does not exist, this method will do nothing. - */ - removeNode: function (node) { - if (this.hasNode(node)) { - delete this.nodes[node]; - delete this.outgoingEdges[node]; - delete this.incomingEdges[node]; - [this.incomingEdges, this.outgoingEdges].forEach(function (edgeList) { - Object.keys(edgeList).forEach(function (key) { - var idx = edgeList[key].indexOf(node); - if (idx >= 0) { - edgeList[key].splice(idx, 1); - } - }, this); - }); - } - }, - /** - * Check if a node exists in the graph - */ - hasNode: function (node) { - return this.nodes.hasOwnProperty(node); - }, - /** - * Get the data associated with a node name - */ - getNodeData: function (node) { - if (this.hasNode(node)) { - return this.nodes[node]; - } else { - throw new Error("Node does not exist: " + node); - } - }, - /** - * Set the associated data for a given node name. If the node does not exist, this method will throw an error - */ - setNodeData: function (node, data) { - if (this.hasNode(node)) { - this.nodes[node] = data; - } else { - throw new Error("Node does not exist: " + node); - } - }, - /** - * Add a dependency between two nodes. If either of the nodes does not exist, - * an Error will be thrown. - */ - addDependency: function (from, to) { - if (!this.hasNode(from)) { - throw new Error("Node does not exist: " + from); - } - if (!this.hasNode(to)) { - throw new Error("Node does not exist: " + to); - } - if (this.outgoingEdges[from].indexOf(to) === -1) { - this.outgoingEdges[from].push(to); - } - if (this.incomingEdges[to].indexOf(from) === -1) { - this.incomingEdges[to].push(from); - } - return true; - }, - /** - * Remove a dependency between two nodes. - */ - removeDependency: function (from, to) { - var idx; - if (this.hasNode(from)) { - idx = this.outgoingEdges[from].indexOf(to); - if (idx >= 0) { - this.outgoingEdges[from].splice(idx, 1); - } - } - - if (this.hasNode(to)) { - idx = this.incomingEdges[to].indexOf(from); - if (idx >= 0) { - this.incomingEdges[to].splice(idx, 1); - } - } - }, - /** - * Return a clone of the dependency graph. If any custom data is attached - * to the nodes, it will only be shallow copied. - */ - clone: function () { - var source = this; - var result = new DepGraph(); - var keys = Object.keys(source.nodes); - keys.forEach(function (n) { - result.nodes[n] = source.nodes[n]; - result.outgoingEdges[n] = source.outgoingEdges[n].slice(0); - result.incomingEdges[n] = source.incomingEdges[n].slice(0); - }); - return result; - }, - /** - * Get an array containing the direct dependencies of the specified node. - * - * Throws an Error if the specified node does not exist. 
- */ - directDependenciesOf: function (node) { - if (this.hasNode(node)) { - return this.outgoingEdges[node].slice(0); - } else { - throw new Error("Node does not exist: " + node); - } - }, - /** - * Get an array containing the nodes that directly depend on the specified node. - * - * Throws an Error if the specified node does not exist. - */ - directDependantsOf: function (node) { - if (this.hasNode(node)) { - return this.incomingEdges[node].slice(0); - } else { - throw new Error("Node does not exist: " + node); - } - }, - /** - * Get an array containing the nodes that the specified node depends on (transitively). - * - * Throws an Error if the graph has a cycle, or the specified node does not exist. - * - * If `leavesOnly` is true, only nodes that do not depend on any other nodes will be returned - * in the array. - */ - dependenciesOf: function (node, leavesOnly) { - if (this.hasNode(node)) { - var result = []; - var DFS = createDFS( - this.outgoingEdges, - leavesOnly, - result, - this.circular - ); - DFS(node); - var idx = result.indexOf(node); - if (idx >= 0) { - result.splice(idx, 1); - } - return result; - } else { - throw new Error("Node does not exist: " + node); - } - }, - /** - * get an array containing the nodes that depend on the specified node (transitively). - * - * Throws an Error if the graph has a cycle, or the specified node does not exist. - * - * If `leavesOnly` is true, only nodes that do not have any dependants will be returned in the array. - */ - dependantsOf: function (node, leavesOnly) { - if (this.hasNode(node)) { - var result = []; - var DFS = createDFS( - this.incomingEdges, - leavesOnly, - result, - this.circular - ); - DFS(node); - var idx = result.indexOf(node); - if (idx >= 0) { - result.splice(idx, 1); - } - return result; - } else { - throw new Error("Node does not exist: " + node); - } - }, - /** - * Construct the overall processing order for the dependency graph. - * - * Throws an Error if the graph has a cycle. - * - * If `leavesOnly` is true, only nodes that do not depend on any other nodes will be returned. - */ - overallOrder: function (leavesOnly) { - var self = this; - var result = []; - var keys = Object.keys(this.nodes); - if (keys.length === 0) { - return result; // Empty graph - } else { - if (!this.circular) { - // Look for cycles - we run the DFS starting at all the nodes in case there - // are several disconnected subgraphs inside this dependency graph. - var CycleDFS = createDFS(this.outgoingEdges, false, [], this.circular); - keys.forEach(function (n) { - CycleDFS(n); - }); - } - - var DFS = createDFS( - this.outgoingEdges, - leavesOnly, - result, - this.circular - ); - // Find all potential starting points (nodes with nothing depending on them) an - // run a DFS starting at these points to get the order - keys - .filter(function (node) { - return self.incomingEdges[node].length === 0; - }) - .forEach(function (n) { - DFS(n); - }); - - // If we're allowing cycles - we need to run the DFS against any remaining - // nodes that did not end up in the initial result (as they are part of a - // subgraph that does not have a clear starting point) - if (this.circular) { - keys - .filter(function (node) { - return result.indexOf(node) === -1; - }) - .forEach(function (n) { - DFS(n); - }); - } - - return result; - } - }, - /** - * Get an array of nodes that have no dependants (i.e. nothing depends on them). 
- */ - entryNodes: function () { - var self = this; - return Object.keys(this.nodes).filter(function (node) { - return self.incomingEdges[node].length === 0; - }); - } -}; - -// Create some aliases -DepGraph.prototype.directDependentsOf = DepGraph.prototype.directDependantsOf; -DepGraph.prototype.dependentsOf = DepGraph.prototype.dependantsOf; - -/** - * Cycle error, including the path of the cycle. - */ -var DepGraphCycleError = (exports.DepGraphCycleError = function (cyclePath) { - var message = "Dependency Cycle Found: " + cyclePath.join(" -> "); - var instance = new Error(message); - instance.cyclePath = cyclePath; - Object.setPrototypeOf(instance, Object.getPrototypeOf(this)); - if (Error.captureStackTrace) { - Error.captureStackTrace(instance, DepGraphCycleError); - } - return instance; -}); -DepGraphCycleError.prototype = Object.create(Error.prototype, { - constructor: { - value: Error, - enumerable: false, - writable: true, - configurable: true - } -}); -Object.setPrototypeOf(DepGraphCycleError, Error); diff --git a/node_modules/dependency-graph/lib/index.d.ts b/node_modules/dependency-graph/lib/index.d.ts deleted file mode 100755 index 6ed8de5..0000000 --- a/node_modules/dependency-graph/lib/index.d.ts +++ /dev/null @@ -1,127 +0,0 @@ -declare module 'dependency-graph' { - export interface Options { - circular?: boolean; - } - - export class DepGraph { - /** - * Creates an instance of DepGraph with optional Options. - */ - constructor(opts?: Options); - - /** - * The number of nodes in the graph. - */ - size(): number; - - /** - * Add a node in the graph with optional data. If data is not given, name will be used as data. - * @param {string} name - * @param data - */ - addNode(name: string, data?: T): void; - - /** - * Remove a node from the graph. - * @param {string} name - */ - removeNode(name: string): void; - - /** - * Check if a node exists in the graph. - * @param {string} name - */ - hasNode(name: string): boolean; - - /** - * Get the data associated with a node (will throw an Error if the node does not exist). - * @param {string} name - */ - getNodeData(name: string): T; - - /** - * Set the data for an existing node (will throw an Error if the node does not exist). - * @param {string} name - * @param data - */ - setNodeData(name: string, data?: T): void; - - /** - * Add a dependency between two nodes (will throw an Error if one of the nodes does not exist). - * @param {string} from - * @param {string} to - */ - addDependency(from: string, to: string): void; - - /** - * Remove a dependency between two nodes. - * @param {string} from - * @param {string} to - */ - removeDependency(from: string, to: string): void; - - /** - * Return a clone of the dependency graph (If any custom data is attached - * to the nodes, it will only be shallow copied). - */ - clone(): DepGraph; - - /** - * Get an array containing the direct dependency nodes of the specified node. - * @param name - */ - directDependenciesOf(name: string): string[]; - - /** - * Get an array containing the nodes that directly depend on the specified node. - * @param name - */ - directDependantsOf(name: string): string[]; - - /** - * Alias of `directDependantsOf` - * - * @see directDependantsOf - * @param {string} name - */ - directDependentsOf(name: string): string[]; - - /** - * Get an array containing the nodes that the specified node depends on (transitively). If leavesOnly is true, only nodes that do not depend on any other nodes will be returned in the array. 
- * @param {string} name - * @param {boolean} leavesOnly - */ - dependenciesOf(name: string, leavesOnly?: boolean): string[]; - - /** - * Get an array containing the nodes that depend on the specified node (transitively). If leavesOnly is true, only nodes that do not have any dependants will be returned in the array. - * @param {string} name - * @param {boolean} leavesOnly - */ - dependantsOf(name: string, leavesOnly?: boolean): string[]; - - /** - * Alias of `dependantsOf` - * - * @see dependantsOf - * @param name - * @param leavesOnly - */ - dependentsOf(name: string, leavesOnly?: boolean): string[]; - - /** - * Get an array of nodes that have no dependants (i.e. nothing depends on them). - */ - entryNodes(): string[]; - - /** - * Construct the overall processing order for the dependency graph. If leavesOnly is true, only nodes that do not depend on any other nodes will be returned. - * @param {boolean} leavesOnly - */ - overallOrder(leavesOnly?: boolean): string[]; - } - - export class DepGraphCycleError extends Error { - cyclePath: string[]; - } -} diff --git a/node_modules/dependency-graph/package.json b/node_modules/dependency-graph/package.json deleted file mode 100755 index d965899..0000000 --- a/node_modules/dependency-graph/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "dependency-graph", - "description": "Simple dependency graph.", - "version": "0.11.0", - "author": "Jim Riecken ", - "keywords": [ - "dependency", - "graph" - ], - "license": "MIT", - "repository": { - "type": "git", - "url": "git://github.com/jriecken/dependency-graph.git" - }, - "bugs": { - "url": "http://github.com/jriecken/dependency-graph/issues" - }, - "main": "./lib/dep_graph.js", - "scripts": { - "test": "jasmine specs/**/*.js" - }, - "dependencies": {}, - "optionalDependencies": {}, - "devDependencies": { - "jasmine": "3.5.0" - }, - "engines": { - "node": ">= 0.6.0" - }, - "types": "./lib/index.d.ts" -} \ No newline at end of file diff --git a/node_modules/dependency-graph/specs/dep_graph_spec.js b/node_modules/dependency-graph/specs/dep_graph_spec.js deleted file mode 100755 index 07aa68e..0000000 --- a/node_modules/dependency-graph/specs/dep_graph_spec.js +++ /dev/null @@ -1,542 +0,0 @@ -var dep_graph = require("../lib/dep_graph"); -var DepGraph = dep_graph.DepGraph; - -describe("DepGraph", function () { - it("should be able to add/remove nodes", function () { - var graph = new DepGraph(); - - graph.addNode("Foo"); - graph.addNode("Bar"); - - expect(graph.hasNode("Foo")).toBeTrue(); - expect(graph.hasNode("Bar")).toBeTrue(); - expect(graph.hasNode("NotThere")).toBeFalse(); - - graph.removeNode("Bar"); - - expect(graph.hasNode("Bar")).toBeFalse(); - }); - - it("should calculate its size", function () { - var graph = new DepGraph(); - - expect(graph.size()).toBe(0); - - graph.addNode("Foo"); - graph.addNode("Bar"); - - expect(graph.size()).toBe(2); - - graph.removeNode("Bar"); - - expect(graph.size()).toBe(1); - }); - - it("should treat the node data parameter as optional and use the node name as data if node data was not given", function () { - var graph = new DepGraph(); - - graph.addNode("Foo"); - - expect(graph.getNodeData("Foo")).toBe("Foo"); - }); - - it("should be able to associate a node name with data on node add", function () { - var graph = new DepGraph(); - - graph.addNode("Foo", "data"); - - expect(graph.getNodeData("Foo")).toBe("data"); - }); - - it("should be able to add undefined as node data", function () { - var graph = new DepGraph(); - - graph.addNode("Foo", undefined); - - 
expect(graph.getNodeData("Foo")).toBeUndefined(); - }); - - it("should return true when using hasNode with a node which has falsy data", function () { - var graph = new DepGraph(); - - var falsyData = ["", 0, null, undefined, false]; - graph.addNode("Foo"); - - falsyData.forEach(function (data) { - graph.setNodeData("Foo", data); - - expect(graph.hasNode("Foo")).toBeTrue(); - - // Just an extra check to make sure that the saved data is correct - expect(graph.getNodeData("Foo")).toBe(data); - }); - }); - - it("should be able to set data after a node was added", function () { - var graph = new DepGraph(); - - graph.addNode("Foo", "data"); - graph.setNodeData("Foo", "data2"); - - expect(graph.getNodeData("Foo")).toBe("data2"); - }); - - it("should throw an error if we try to set data for a non-existing node", function () { - var graph = new DepGraph(); - - expect(function () { - graph.setNodeData("Foo", "data"); - }).toThrow(new Error("Node does not exist: Foo")); - }); - - it("should throw an error if the node does not exists and we try to get data", function () { - var graph = new DepGraph(); - - expect(function () { - graph.getNodeData("Foo"); - }).toThrow(new Error("Node does not exist: Foo")); - }); - - it("should do nothing if creating a node that already exists", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - - graph.addDependency("a", "b"); - - graph.addNode("a"); - - expect(graph.dependenciesOf("a")).toEqual(["b"]); - }); - - it("should do nothing if removing a node that does not exist", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - expect(graph.hasNode("a")).toBeTrue(); - - graph.removeNode("a"); - expect(graph.hasNode("Foo")).toBeFalse(); - - graph.removeNode("a"); - expect(graph.hasNode("Foo")).toBeFalse(); - }); - - it("should be able to add dependencies between nodes", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - - graph.addDependency("a", "b"); - graph.addDependency("a", "c"); - - expect(graph.dependenciesOf("a")).toEqual(["b", "c"]); - }); - - it("should find entry nodes", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - - graph.addDependency("a", "b"); - graph.addDependency("a", "c"); - - expect(graph.entryNodes()).toEqual(["a"]); - }); - - it("should throw an error if a node does not exist and a dependency is added", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - - expect(function () { - graph.addDependency("a", "b"); - }).toThrow(new Error("Node does not exist: b")); - }); - - it("should detect cycles", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addNode("d"); - - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - graph.addDependency("c", "a"); - graph.addDependency("d", "a"); - - expect(function () { - graph.dependenciesOf("b"); - }).toThrow(new dep_graph.DepGraphCycleError(["b", "c", "a", "b"])); - }); - - it("should allow cycles when configured", function () { - var graph = new DepGraph({ circular: true }); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addNode("d"); - - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - graph.addDependency("c", "a"); - graph.addDependency("d", "a"); - - expect(graph.dependenciesOf("b")).toEqual(["a", "c"]); - expect(graph.overallOrder()).toEqual(["c", "b", "a", "d"]); - }); - - it( 
- "should include all nodes in overall order even from " + - "cycles in disconnected subgraphs when circular is true", - function () { - var graph = new DepGraph({ circular: true }); - - graph.addNode("2a"); - graph.addNode("2b"); - graph.addNode("2c"); - graph.addDependency("2a", "2b"); - graph.addDependency("2b", "2c"); - graph.addDependency("2c", "2a"); - - graph.addNode("1a"); - graph.addNode("1b"); - graph.addNode("1c"); - graph.addNode("1d"); - graph.addNode("1e"); - - graph.addDependency("1a", "1b"); - graph.addDependency("1a", "1c"); - graph.addDependency("1b", "1c"); - graph.addDependency("1c", "1d"); - - expect(graph.overallOrder()).toEqual([ - "1d", - "1c", - "1b", - "1a", - "1e", - "2c", - "2b", - "2a" - ]); - } - ); - - it("should detect cycles in overall order", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addNode("d"); - - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - graph.addDependency("c", "a"); - graph.addDependency("d", "a"); - - expect(function () { - graph.overallOrder(); - }).toThrow(new dep_graph.DepGraphCycleError(["a", "b", "c", "a"])); - }); - - it("should detect cycles in overall order when all nodes have dependants (incoming edges)", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - graph.addDependency("c", "a"); - - expect(function () { - graph.overallOrder(); - }).toThrow(new dep_graph.DepGraphCycleError(["a", "b", "c", "a"])); - }); - - it( - "should detect cycles in overall order when there are several " + - "disconnected subgraphs (with one that does not have a cycle", - function () { - var graph = new DepGraph(); - - graph.addNode("a_1"); - graph.addNode("a_2"); - graph.addNode("b_1"); - graph.addNode("b_2"); - graph.addNode("b_3"); - - graph.addDependency("a_1", "a_2"); - graph.addDependency("b_1", "b_2"); - graph.addDependency("b_2", "b_3"); - graph.addDependency("b_3", "b_1"); - - expect(function () { - graph.overallOrder(); - }).toThrow( - new dep_graph.DepGraphCycleError(["b_1", "b_2", "b_3", "b_1"]) - ); - } - ); - - it("should retrieve dependencies and dependants in the correct order", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addNode("d"); - - graph.addDependency("a", "d"); - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - graph.addDependency("d", "b"); - - expect(graph.dependenciesOf("a")).toEqual(["c", "b", "d"]); - expect(graph.dependenciesOf("b")).toEqual(["c"]); - expect(graph.dependenciesOf("c")).toEqual([]); - expect(graph.dependenciesOf("d")).toEqual(["c", "b"]); - - expect(graph.dependantsOf("a")).toEqual([]); - expect(graph.dependantsOf("b")).toEqual(["a", "d"]); - expect(graph.dependantsOf("c")).toEqual(["a", "d", "b"]); - expect(graph.dependantsOf("d")).toEqual(["a"]); - - // check the alias "dependentsOf" - expect(graph.dependentsOf("a")).toEqual([]); - expect(graph.dependentsOf("b")).toEqual(["a", "d"]); - expect(graph.dependentsOf("c")).toEqual(["a", "d", "b"]); - expect(graph.dependentsOf("d")).toEqual(["a"]); - }); - - it("should be able to retrieve direct dependencies/dependants", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addNode("d"); - - graph.addDependency("a", "d"); - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - 
graph.addDependency("d", "b"); - - expect(graph.directDependenciesOf("a")).toEqual(["d", "b"]); - expect(graph.directDependenciesOf("b")).toEqual(["c"]); - expect(graph.directDependenciesOf("c")).toEqual([]); - expect(graph.directDependenciesOf("d")).toEqual(["b"]); - - expect(graph.directDependantsOf("a")).toEqual([]); - expect(graph.directDependantsOf("b")).toEqual(["a", "d"]); - expect(graph.directDependantsOf("c")).toEqual(["b"]); - expect(graph.directDependantsOf("d")).toEqual(["a"]); - - // check the alias "directDependentsOf" - expect(graph.directDependentsOf("a")).toEqual([]); - expect(graph.directDependentsOf("b")).toEqual(["a", "d"]); - expect(graph.directDependentsOf("c")).toEqual(["b"]); - expect(graph.directDependentsOf("d")).toEqual(["a"]); - }); - - it("should be able to resolve the overall order of things", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addNode("d"); - graph.addNode("e"); - - graph.addDependency("a", "b"); - graph.addDependency("a", "c"); - graph.addDependency("b", "c"); - graph.addDependency("c", "d"); - - expect(graph.overallOrder()).toEqual(["d", "c", "b", "a", "e"]); - }); - - it('should be able to only retrieve the "leaves" in the overall order', function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addNode("d"); - graph.addNode("e"); - - graph.addDependency("a", "b"); - graph.addDependency("a", "c"); - graph.addDependency("b", "c"); - graph.addDependency("c", "d"); - - expect(graph.overallOrder(true)).toEqual(["d", "e"]); - }); - - it("should be able to give the overall order for a graph with several disconnected subgraphs", function () { - var graph = new DepGraph(); - - graph.addNode("a_1"); - graph.addNode("a_2"); - graph.addNode("b_1"); - graph.addNode("b_2"); - graph.addNode("b_3"); - - graph.addDependency("a_1", "a_2"); - graph.addDependency("b_1", "b_2"); - graph.addDependency("b_2", "b_3"); - - expect(graph.overallOrder()).toEqual(["a_2", "a_1", "b_3", "b_2", "b_1"]); - }); - - it("should give an empty overall order for an empty graph", function () { - var graph = new DepGraph(); - - expect(graph.overallOrder()).toEqual([]); - }); - - it("should still work after nodes are removed", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - - expect(graph.dependenciesOf("a")).toEqual(["c", "b"]); - - graph.removeNode("c"); - - expect(graph.dependenciesOf("a")).toEqual(["b"]); - }); - - it("should clone an empty graph", function () { - var graph = new DepGraph(); - expect(graph.size()).toEqual(0); - var cloned = graph.clone(); - expect(cloned.size()).toEqual(0); - - expect(graph === cloned).toBeFalse(); - }); - - it("should clone a non-empty graph", function () { - var graph = new DepGraph(); - - graph.addNode("a"); - graph.addNode("b"); - graph.addNode("c"); - graph.addDependency("a", "b"); - graph.addDependency("b", "c"); - - var cloned = graph.clone(); - - expect(graph === cloned).toBeFalse(); - expect(cloned.hasNode("a")).toBeTrue(); - expect(cloned.hasNode("b")).toBeTrue(); - expect(cloned.hasNode("c")).toBeTrue(); - expect(cloned.dependenciesOf("a")).toEqual(["c", "b"]); - expect(cloned.dependantsOf("c")).toEqual(["a", "b"]); - - // Changes to the original graph shouldn't affect the clone - graph.removeNode("c"); - expect(graph.dependenciesOf("a")).toEqual(["b"]); - 
expect(cloned.dependenciesOf("a")).toEqual(["c", "b"]); - - graph.addNode("d"); - graph.addDependency("b", "d"); - expect(graph.dependenciesOf("a")).toEqual(["d", "b"]); - expect(cloned.dependenciesOf("a")).toEqual(["c", "b"]); - }); - - it("should only be a shallow clone", function () { - var graph = new DepGraph(); - - var data = { a: 42 }; - graph.addNode("a", data); - - var cloned = graph.clone(); - expect(graph === cloned).toBeFalse(); - expect(graph.getNodeData("a") === cloned.getNodeData("a")).toBeTrue(); - - graph.getNodeData("a").a = 43; - expect(cloned.getNodeData("a").a).toBe(43); - - cloned.setNodeData("a", { a: 42 }); - expect(cloned.getNodeData("a").a).toBe(42); - expect(graph.getNodeData("a") === cloned.getNodeData("a")).toBeFalse(); - }); -}); - -describe("DepGraph Performance", function () { - it("should not exceed max call stack with a very deep graph", function () { - var g = new DepGraph(); - var expected = []; - for (var i = 0; i < 100000; i++) { - var istr = i.toString(); - g.addNode(istr); - expected.push(istr); - if (i > 0) { - g.addDependency(istr, (i - 1).toString()); - } - } - var order = g.overallOrder(); - expect(order).toEqual(expected); - }); - - it("should run an a reasonable amount of time for a very large graph", function () { - var randInt = function (min, max) { - return Math.floor(Math.random() * (max - min + 1)) + min; - }; - var g = new DepGraph(); - var nodes = []; - // Create a graph with 100000 nodes in it with 10 random connections to - // lower numbered nodes - for (var i = 0; i < 100000; i++) { - nodes.push(i.toString()); - g.addNode(i.toString()); - for (var j = 0; j < 10; j++) { - var dep = randInt(0, i); - if (i !== dep) { - g.addDependency(i.toString(), dep.toString()); - } - } - } - var start = new Date().getTime(); - g.overallOrder(); - var end = new Date().getTime(); - expect(start - end).toBeLessThan(1000); - }); -}); - -describe("DepGraphCycleError", function () { - var DepGraphCycleError = dep_graph.DepGraphCycleError; - - it("should have a message", function () { - var err = new DepGraphCycleError(["a", "b", "c", "a"]); - expect(err.message).toEqual("Dependency Cycle Found: a -> b -> c -> a"); - }); - - it("should be an instanceof DepGraphCycleError", function () { - var err = new DepGraphCycleError(["a", "b", "c", "a"]); - expect(err instanceof DepGraphCycleError).toBeTrue(); - expect(err instanceof Error).toBeTrue(); - }); - - it("should have a cyclePath", function () { - var cyclePath = ["a", "b", "c", "a"]; - var err = new DepGraphCycleError(cyclePath); - expect(err.cyclePath).toEqual(cyclePath); - }); -}); diff --git a/node_modules/emoji-regex/LICENSE-MIT.txt b/node_modules/emoji-regex/LICENSE-MIT.txt deleted file mode 100644 index a41e0a7..0000000 --- a/node_modules/emoji-regex/LICENSE-MIT.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright Mathias Bynens - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/emoji-regex/README.md b/node_modules/emoji-regex/README.md deleted file mode 100644 index f10e173..0000000 --- a/node_modules/emoji-regex/README.md +++ /dev/null @@ -1,73 +0,0 @@ -# emoji-regex [![Build status](https://travis-ci.org/mathiasbynens/emoji-regex.svg?branch=master)](https://travis-ci.org/mathiasbynens/emoji-regex) - -_emoji-regex_ offers a regular expression to match all emoji symbols (including textual representations of emoji) as per the Unicode Standard. - -This repository contains a script that generates this regular expression based on [the data from Unicode v12](https://github.com/mathiasbynens/unicode-12.0.0). Because of this, the regular expression can easily be updated whenever new emoji are added to the Unicode standard. - -## Installation - -Via [npm](https://www.npmjs.com/): - -```bash -npm install emoji-regex -``` - -In [Node.js](https://nodejs.org/): - -```js -const emojiRegex = require('emoji-regex'); -// Note: because the regular expression has the global flag set, this module -// exports a function that returns the regex rather than exporting the regular -// expression itself, to make it impossible to (accidentally) mutate the -// original regular expression. - -const text = ` -\u{231A}: ⌚ default emoji presentation character (Emoji_Presentation) -\u{2194}\u{FE0F}: ↔️ default text presentation character rendered as emoji -\u{1F469}: 👩 emoji modifier base (Emoji_Modifier_Base) -\u{1F469}\u{1F3FF}: 👩🏿 emoji modifier base followed by a modifier -`; - -const regex = emojiRegex(); -let match; -while (match = regex.exec(text)) { - const emoji = match[0]; - console.log(`Matched sequence ${ emoji } — code points: ${ [...emoji].length }`); -} -``` - -Console output: - -``` -Matched sequence ⌚ — code points: 1 -Matched sequence ⌚ — code points: 1 -Matched sequence ↔️ — code points: 2 -Matched sequence ↔️ — code points: 2 -Matched sequence 👩 — code points: 1 -Matched sequence 👩 — code points: 1 -Matched sequence 👩🏿 — code points: 2 -Matched sequence 👩🏿 — code points: 2 -``` - -To match emoji in their textual representation as well (i.e. emoji that are not `Emoji_Presentation` symbols and that aren’t forced to render as emoji by a variation selector), `require` the other regex: - -```js -const emojiRegex = require('emoji-regex/text.js'); -``` - -Additionally, in environments which support ES2015 Unicode escapes, you may `require` ES2015-style versions of the regexes: - -```js -const emojiRegex = require('emoji-regex/es2015/index.js'); -const emojiRegexText = require('emoji-regex/es2015/text.js'); -``` - -## Author - -| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | -|---| -| [Mathias Bynens](https://mathiasbynens.be/) | - -## License - -_emoji-regex_ is available under the [MIT](https://mths.be/mit) license. 
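Alongside the emoji-regex files, the patch also removes the vendored `dependency-graph` package, whose public API is documented in the deleted `lib/index.d.ts` and exercised by the deleted `specs/dep_graph_spec.js` above. A minimal usage sketch of that API, assuming the `dependency-graph@0.11.0` behavior captured in those deleted files:

```js
// Sketch based on the deleted lib/index.d.ts and dep_graph_spec.js above.
const { DepGraph } = require('dependency-graph');

const graph = new DepGraph();
graph.addNode('a');
graph.addNode('b');
graph.addNode('c');

graph.addDependency('a', 'b'); // "a" depends on "b"
graph.addDependency('b', 'c'); // "b" depends on "c"

console.log(graph.dependenciesOf('a')); // ['c', 'b']       (transitive deps, leaves first)
console.log(graph.overallOrder());      // ['c', 'b', 'a']  (topological processing order)
console.log(graph.entryNodes());        // ['a']            (nothing depends on it)

// Adding a dependency that closes a cycle makes traversal throw a
// DepGraphCycleError, unless the graph was created with
// `new DepGraph({ circular: true })`.
```

Per the deleted spec file, `overallOrder(true)` and `dependenciesOf(name, true)` restrict the result to leaf nodes, and `dependentsOf`/`directDependentsOf` are aliases for the `dependants` spellings.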
diff --git a/node_modules/emoji-regex/es2015/index.js b/node_modules/emoji-regex/es2015/index.js deleted file mode 100644 index b4cf3dc..0000000 --- a/node_modules/emoji-regex/es2015/index.js +++ /dev/null @@ -1,6 +0,0 @@ -"use strict"; - -module.exports = () => { - // https://mths.be/emoji - return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}
\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1
E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{
1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; -}; diff --git a/node_modules/emoji-regex/es2015/text.js b/node_modules/emoji-regex/es2015/text.js deleted file mode 100644 index 780309d..0000000 --- a/node_modules/emoji-regex/es2015/text.js +++ /dev/null @@ -1,6 +0,0 @@ -"use strict"; - -module.exports = () => { - // https://mths.be/emoji - return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:
\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\
u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1
F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F?|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; -}; diff --git a/node_modules/emoji-regex/index.d.ts b/node_modules/emoji-regex/index.d.ts deleted file mode 100644 index 1955b47..0000000 --- a/node_modules/emoji-regex/index.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -declare module 'emoji-regex' { - function emojiRegex(): RegExp; - - export default emojiRegex; -} - -declare module 'emoji-regex/text' { - function emojiRegex(): RegExp; - - export default emojiRegex; -} - -declare module 'emoji-regex/es2015' { - function emojiRegex(): RegExp; - - export default emojiRegex; -} - -declare module 'emoji-regex/es2015/text' { - function emojiRegex(): RegExp; - - export default emojiRegex; -} diff --git a/node_modules/emoji-regex/index.js b/node_modules/emoji-regex/index.js deleted file mode 100644 index d993a3a..0000000 --- a/node_modules/emoji-regex/index.js +++ /dev/null @@ -1,6 +0,0 @@ -"use strict"; - -module.exports = function () { - // https://mths.be/emoji - return 
/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F|\uD83D\uDC68(?:\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68\uD83C\uDFFB|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|[\u2695\u2696\u2708]\uFE0F|\uD83D[\uDC66\uDC67]|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708])\uFE0F|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C[\uDFFB-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)\uD83C\uDFFB|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB\uDFFC])|\uD83D\uDC69(?:\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD8
3C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB-\uDFFD])|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)\uFE0F|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\uD83C\uDFF4\u200D\u2620)\uFE0F|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF4\uD83C\uDDF2|\uD83C\uDDF6\uD83C\uDDE6|[#\*0-9]\uFE0F\u20E3|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83D\uDC69(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\u
DD75)(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270A-\u270D]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC70\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDCAA\uDD74\uDD7A\uDD90\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD36\uDDB5\uDDB6\uDDBB\uDDD2-\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5\uDEEB\uDEEC\uDEF4-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])\uFE0F|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; -}; diff --git a/node_modules/emoji-regex/package.json b/node_modules/emoji-regex/package.json deleted file mode 100644 index 6d32352..0000000 --- a/node_modules/emoji-regex/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "emoji-regex", - "version": "8.0.0", - "description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.", - "homepage": "https://mths.be/emoji-regex", - "main": "index.js", - "types": "index.d.ts", - 
"keywords": [ - "unicode", - "regex", - "regexp", - "regular expressions", - "code points", - "symbols", - "characters", - "emoji" - ], - "license": "MIT", - "author": { - "name": "Mathias Bynens", - "url": "https://mathiasbynens.be/" - }, - "repository": { - "type": "git", - "url": "https://github.com/mathiasbynens/emoji-regex.git" - }, - "bugs": "https://github.com/mathiasbynens/emoji-regex/issues", - "files": [ - "LICENSE-MIT.txt", - "index.js", - "index.d.ts", - "text.js", - "es2015/index.js", - "es2015/text.js" - ], - "scripts": { - "build": "rm -rf -- es2015; babel src -d .; NODE_ENV=es2015 babel src -d ./es2015; node script/inject-sequences.js", - "test": "mocha", - "test:watch": "npm run test -- --watch" - }, - "devDependencies": { - "@babel/cli": "^7.2.3", - "@babel/core": "^7.3.4", - "@babel/plugin-proposal-unicode-property-regex": "^7.2.0", - "@babel/preset-env": "^7.3.4", - "mocha": "^6.0.2", - "regexgen": "^1.3.0", - "unicode-12.0.0": "^0.7.9" - } -} diff --git a/node_modules/emoji-regex/text.js b/node_modules/emoji-regex/text.js deleted file mode 100644 index 0a55ce2..0000000 --- a/node_modules/emoji-regex/text.js +++ /dev/null @@ -1,6 +0,0 @@ -"use strict"; - -module.exports = function () { - // https://mths.be/emoji - return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F|\uD83D\uDC68(?:\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68\uD83C\uDFFB|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|[\u2695\u2696\u2708]\uFE0F|\uD83D[\uDC66\uDC67]|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708])\uFE0F|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C[\uDFFB-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)\uD83C\uDFFB|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\
uDDD1\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB\uDFFC])|\uD83D\uDC69(?:\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB-\uDFFD])|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)\uFE0F|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\uD83C\uDFF4\u200D\u2620)\uFE0F|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF4\uD83C\uDDF2|\uD83C\uDDF6\uD83C\uDDE6|[#\*0-9]\uFE0F\u20E3|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83D\uDC69(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\u
DDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270A-\u270D]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC70\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDCAA\uDD74\uDD7A\uDD90\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD36\uDDB5\uDDB6\uDDBB\uDDD2-\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5\uDEEB\uDEEC\uDEF4-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\u
DD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])\uFE0F?|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; -}; diff --git a/node_modules/escalade/dist/index.js b/node_modules/escalade/dist/index.js deleted file mode 100644 index ad236c4..0000000 --- a/node_modules/escalade/dist/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const { dirname, resolve } = require('path'); -const { readdir, stat } = require('fs'); -const { promisify } = require('util'); - -const toStats = promisify(stat); -const toRead = promisify(readdir); - -module.exports = async function (start, callback) { - let dir = resolve('.', start); - let tmp, stats = await toStats(dir); - - if (!stats.isDirectory()) { - dir = dirname(dir); - } - - while (true) { - tmp = await callback(dir, await toRead(dir)); - if (tmp) return resolve(dir, tmp); - dir = dirname(tmp = dir); - if (tmp === dir) break; - } -} diff --git a/node_modules/escalade/dist/index.mjs b/node_modules/escalade/dist/index.mjs deleted file mode 100644 index bf95be0..0000000 --- a/node_modules/escalade/dist/index.mjs +++ /dev/null @@ -1,22 +0,0 @@ -import { dirname, resolve } from 'path'; -import { readdir, stat } from 'fs'; -import { promisify } from 'util'; - -const toStats = promisify(stat); -const toRead = promisify(readdir); - -export default async function (start, callback) { - let dir = resolve('.', start); - let tmp, stats = await toStats(dir); - - if (!stats.isDirectory()) { - dir = dirname(dir); - } - - while (true) { - tmp = await callback(dir, await toRead(dir)); - if (tmp) return resolve(dir, tmp); - dir = dirname(tmp = dir); - if (tmp === dir) break; - } -} diff --git a/node_modules/escalade/index.d.mts b/node_modules/escalade/index.d.mts deleted file mode 100644 index 550699c..0000000 --- a/node_modules/escalade/index.d.mts +++ /dev/null @@ -1,11 +0,0 @@ -type Promisable = T | Promise; - -export type Callback = ( - directory: string, - files: string[], -) => Promisable; - -export default function ( - directory: string, - callback: Callback, -): Promise; diff --git a/node_modules/escalade/index.d.ts b/node_modules/escalade/index.d.ts deleted file mode 100644 index 26c58f2..0000000 --- a/node_modules/escalade/index.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -type Promisable = T | Promise; - -declare namespace escalade { - export type Callback = ( - directory: string, - files: string[], - ) => Promisable; -} - -declare function escalade( - directory: string, - callback: escalade.Callback, -): Promise; - -export = escalade; diff --git a/node_modules/escalade/license b/node_modules/escalade/license deleted file mode 100644 index fa6089f..0000000 --- a/node_modules/escalade/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Luke Edwards (lukeed.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation 
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/escalade/package.json b/node_modules/escalade/package.json deleted file mode 100644 index 1eed4f9..0000000 --- a/node_modules/escalade/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "escalade", - "version": "3.2.0", - "repository": "lukeed/escalade", - "description": "A tiny (183B to 210B) and fast utility to ascend parent directories", - "module": "dist/index.mjs", - "main": "dist/index.js", - "types": "index.d.ts", - "license": "MIT", - "author": { - "name": "Luke Edwards", - "email": "luke.edwards05@gmail.com", - "url": "https://lukeed.com" - }, - "exports": { - ".": [ - { - "import": { - "types": "./index.d.mts", - "default": "./dist/index.mjs" - }, - "require": { - "types": "./index.d.ts", - "default": "./dist/index.js" - } - }, - "./dist/index.js" - ], - "./sync": [ - { - "import": { - "types": "./sync/index.d.mts", - "default": "./sync/index.mjs" - }, - "require": { - "types": "./sync/index.d.ts", - "default": "./sync/index.js" - } - }, - "./sync/index.js" - ] - }, - "files": [ - "*.d.mts", - "*.d.ts", - "dist", - "sync" - ], - "modes": { - "sync": "src/sync.js", - "default": "src/async.js" - }, - "engines": { - "node": ">=6" - }, - "scripts": { - "build": "bundt", - "pretest": "npm run build", - "test": "uvu -r esm test -i fixtures" - }, - "keywords": [ - "find", - "parent", - "parents", - "directory", - "search", - "walk" - ], - "devDependencies": { - "bundt": "1.1.1", - "esm": "3.2.25", - "uvu": "0.3.3" - } -} diff --git a/node_modules/escalade/readme.md b/node_modules/escalade/readme.md deleted file mode 100644 index e07ee0d..0000000 --- a/node_modules/escalade/readme.md +++ /dev/null @@ -1,211 +0,0 @@ -# escalade [![CI](https://github.com/lukeed/escalade/workflows/CI/badge.svg)](https://github.com/lukeed/escalade/actions) [![licenses](https://licenses.dev/b/npm/escalade)](https://licenses.dev/npm/escalade) [![codecov](https://badgen.now.sh/codecov/c/github/lukeed/escalade)](https://codecov.io/gh/lukeed/escalade) - -> A tiny (183B to 210B) and [fast](#benchmarks) utility to ascend parent directories - -With [escalade](https://en.wikipedia.org/wiki/Escalade), you can scale parent directories until you've found what you're looking for.
Given an input file or directory, `escalade` will continue executing your callback function until either: - -1) the callback returns a truthy value -2) `escalade` has reached the system root directory (eg, `/`) - -> **Important:**
Please note that `escalade` only deals with direct ancestry – it will not dive into parents' sibling directories. - ---- - -**Notice:** As of v3.1.0, `escalade` now includes [Deno support](http://deno.land/x/escalade)! Please see [Deno Usage](#deno) below. - ---- - -## Install - -``` -$ npm install --save escalade -``` - - -## Modes - -There are two "versions" of `escalade` available: - -#### "async" -> **Node.js:** >= 8.x
-> **Size (gzip):** 210 bytes
-> **Availability:** [CommonJS](https://unpkg.com/escalade/dist/index.js), [ES Module](https://unpkg.com/escalade/dist/index.mjs) - -This is the primary/default mode. It makes use of `async`/`await` and [`util.promisify`](https://nodejs.org/api/util.html#util_util_promisify_original). - -#### "sync" -> **Node.js:** >= 6.x
-> **Size (gzip):** 183 bytes
-> **Availability:** [CommonJS](https://unpkg.com/escalade/sync/index.js), [ES Module](https://unpkg.com/escalade/sync/index.mjs) - -This is the opt-in mode, ideal for scenarios where `async` usage cannot be supported. - - -## Usage - -***Example Structure*** - -``` -/Users/lukeed - └── oss - ├── license - └── escalade - ├── package.json - └── test - └── fixtures - ├── index.js - └── foobar - └── demo.js -``` - -***Example Usage*** - -```js -//~> demo.js -import { join } from 'path'; -import escalade from 'escalade'; - -const input = join(__dirname, 'demo.js'); -// or: const input = __dirname; - -const pkg = await escalade(input, (dir, names) => { - console.log('~> dir:', dir); - console.log('~> names:', names); - console.log('---'); - - if (names.includes('package.json')) { - // will be resolved into absolute - return 'package.json'; - } -}); - -//~> dir: /Users/lukeed/oss/escalade/test/fixtures/foobar -//~> names: ['demo.js'] -//--- -//~> dir: /Users/lukeed/oss/escalade/test/fixtures -//~> names: ['index.js', 'foobar'] -//--- -//~> dir: /Users/lukeed/oss/escalade/test -//~> names: ['fixtures'] -//--- -//~> dir: /Users/lukeed/oss/escalade -//~> names: ['package.json', 'test'] -//--- - -console.log(pkg); -//=> /Users/lukeed/oss/escalade/package.json - -// Now search for "missing123.txt" -// (Assume it doesn't exist anywhere!) -const missing = await escalade(input, (dir, names) => { - console.log('~> dir:', dir); - return names.includes('missing123.txt') && 'missing123.txt'; -}); - -//~> dir: /Users/lukeed/oss/escalade/test/fixtures/foobar -//~> dir: /Users/lukeed/oss/escalade/test/fixtures -//~> dir: /Users/lukeed/oss/escalade/test -//~> dir: /Users/lukeed/oss/escalade -//~> dir: /Users/lukeed/oss -//~> dir: /Users/lukeed -//~> dir: /Users -//~> dir: / - -console.log(missing); -//=> undefined -``` - -> **Note:** To run the above example with "sync" mode, import from `escalade/sync` and remove the `await` keyword. - - -## API - -### escalade(input, callback) -Returns: `string|void` or `Promise` - -When your `callback` locates a file, `escalade` will resolve/return with an absolute path.
-If your `callback` was never satisfied, then `escalade` will resolve/return with nothing (undefined). - -> **Important:**
The `sync` and `async` versions share the same API.
The **only** difference is that `sync` is not Promise-based. - -#### input -Type: `string` - -The path from which to start ascending. - -This may be a file or a directory path.
However, when `input` is a file, `escalade` will begin with its parent directory. - -> **Important:** Unless given an absolute path, `input` will be resolved from `process.cwd()` location. - -#### callback -Type: `Function` - -The callback to execute for each ancestry level. It always is given two arguments: - -1) `dir` - an absolute path of the current parent directory -2) `names` - a list (`string[]`) of contents _relative to_ the `dir` parent - -> **Note:** The `names` list can contain names of files _and_ directories. - -When your callback returns a _falsey_ value, then `escalade` will continue with `dir`'s parent directory, re-invoking your callback with new argument values. - -When your callback returns a string, then `escalade` stops iteration immediately.
-If the string is an absolute path, then it's left as is. Otherwise, the string is resolved into an absolute path _from_ the `dir` that housed the satisfying condition. - -> **Important:** Your `callback` can be a `Promise/AsyncFunction` when using the "async" version of `escalade`. - -## Benchmarks - -> Running on Node.js v10.13.0 - -``` -# Load Time - find-up 3.891ms - escalade 0.485ms - escalade/sync 0.309ms - -# Levels: 6 (target = "foo.txt"): - find-up x 24,856 ops/sec ±6.46% (55 runs sampled) - escalade x 73,084 ops/sec ±4.23% (73 runs sampled) - find-up.sync x 3,663 ops/sec ±1.12% (83 runs sampled) - escalade/sync x 9,360 ops/sec ±0.62% (88 runs sampled) - -# Levels: 12 (target = "package.json"): - find-up x 29,300 ops/sec ±10.68% (70 runs sampled) - escalade x 73,685 ops/sec ± 5.66% (66 runs sampled) - find-up.sync x 1,707 ops/sec ± 0.58% (91 runs sampled) - escalade/sync x 4,667 ops/sec ± 0.68% (94 runs sampled) - -# Levels: 18 (target = "missing123.txt"): - find-up x 21,818 ops/sec ±17.37% (14 runs sampled) - escalade x 67,101 ops/sec ±21.60% (20 runs sampled) - find-up.sync x 1,037 ops/sec ± 2.86% (88 runs sampled) - escalade/sync x 1,248 ops/sec ± 0.50% (93 runs sampled) -``` - -## Deno - -As of v3.1.0, `escalade` is available on the Deno registry. - -Please note that the [API](#api) is identical and that there are still [two modes](#modes) from which to choose: - -```ts -// Choose "async" mode -import escalade from 'https://deno.land/escalade/async.ts'; - -// Choose "sync" mode -import escalade from 'https://deno.land/escalade/sync.ts'; -``` - -> **Important:** The `allow-read` permission is required! - - -## Related - -- [premove](https://github.com/lukeed/premove) - A tiny (247B) utility to remove items recursively -- [totalist](https://github.com/lukeed/totalist) - A tiny (195B to 224B) utility to recursively list all (total) files in a directory -- [mk-dirs](https://github.com/lukeed/mk-dirs) - A tiny (420B) utility to make a directory and its parents, recursively - -## License - -MIT © [Luke Edwards](https://lukeed.com) diff --git a/node_modules/escalade/sync/index.d.mts b/node_modules/escalade/sync/index.d.mts deleted file mode 100644 index c023d37..0000000 --- a/node_modules/escalade/sync/index.d.mts +++ /dev/null @@ -1,9 +0,0 @@ -export type Callback = ( - directory: string, - files: string[], -) => string | false | void; - -export default function ( - directory: string, - callback: Callback, -): string | void; diff --git a/node_modules/escalade/sync/index.d.ts b/node_modules/escalade/sync/index.d.ts deleted file mode 100644 index 9d5b589..0000000 --- a/node_modules/escalade/sync/index.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -declare namespace escalade { - export type Callback = ( - directory: string, - files: string[], - ) => string | false | void; -} - -declare function escalade( - directory: string, - callback: escalade.Callback, -): string | void; - -export = escalade; diff --git a/node_modules/escalade/sync/index.js b/node_modules/escalade/sync/index.js deleted file mode 100644 index 902cc46..0000000 --- a/node_modules/escalade/sync/index.js +++ /dev/null @@ -1,18 +0,0 @@ -const { dirname, resolve } = require('path'); -const { readdirSync, statSync } = require('fs'); - -module.exports = function (start, callback) { - let dir = resolve('.', start); - let tmp, stats = statSync(dir); - - if (!stats.isDirectory()) { - dir = dirname(dir); - } - - while (true) { - tmp = callback(dir, readdirSync(dir)); - if (tmp) return resolve(dir, tmp); - dir = dirname(tmp = dir); - if 
(tmp === dir) break; - } -} diff --git a/node_modules/escalade/sync/index.mjs b/node_modules/escalade/sync/index.mjs deleted file mode 100644 index 3cdc5bd..0000000 --- a/node_modules/escalade/sync/index.mjs +++ /dev/null @@ -1,18 +0,0 @@ -import { dirname, resolve } from 'path'; -import { readdirSync, statSync } from 'fs'; - -export default function (start, callback) { - let dir = resolve('.', start); - let tmp, stats = statSync(dir); - - if (!stats.isDirectory()) { - dir = dirname(dir); - } - - while (true) { - tmp = callback(dir, readdirSync(dir)); - if (tmp) return resolve(dir, tmp); - dir = dirname(tmp = dir); - if (tmp === dir) break; - } -} diff --git a/node_modules/fast-glob/LICENSE b/node_modules/fast-glob/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/fast-glob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/fast-glob/README.md b/node_modules/fast-glob/README.md deleted file mode 100644 index 62d5cb7..0000000 --- a/node_modules/fast-glob/README.md +++ /dev/null @@ -1,830 +0,0 @@ -# fast-glob - -> It's a very fast and efficient [glob][glob_definition] library for [Node.js][node_js]. - -This package provides methods for traversing the file system and returning pathnames that matched a defined set of a specified pattern according to the rules used by the Unix Bash shell with some simplifications, meanwhile results are returned in **arbitrary order**. Quick, simple, effective. - -## Table of Contents - -
-Details - -* [Highlights](#highlights) -* [Old and modern mode](#old-and-modern-mode) -* [Pattern syntax](#pattern-syntax) - * [Basic syntax](#basic-syntax) - * [Advanced syntax](#advanced-syntax) -* [Installation](#installation) -* [API](#api) - * [Asynchronous](#asynchronous) - * [Synchronous](#synchronous) - * [Stream](#stream) - * [patterns](#patterns) - * [[options]](#options) - * [Helpers](#helpers) - * [generateTasks](#generatetaskspatterns-options) - * [isDynamicPattern](#isdynamicpatternpattern-options) - * [escapePath](#escapepathpath) - * [convertPathToPattern](#convertpathtopatternpath) -* [Options](#options-3) - * [Common](#common) - * [concurrency](#concurrency) - * [cwd](#cwd) - * [deep](#deep) - * [followSymbolicLinks](#followsymboliclinks) - * [fs](#fs) - * [ignore](#ignore) - * [suppressErrors](#suppresserrors) - * [throwErrorOnBrokenSymbolicLink](#throwerroronbrokensymboliclink) - * [Output control](#output-control) - * [absolute](#absolute) - * [markDirectories](#markdirectories) - * [objectMode](#objectmode) - * [onlyDirectories](#onlydirectories) - * [onlyFiles](#onlyfiles) - * [stats](#stats) - * [unique](#unique) - * [Matching control](#matching-control) - * [braceExpansion](#braceexpansion) - * [caseSensitiveMatch](#casesensitivematch) - * [dot](#dot) - * [extglob](#extglob) - * [globstar](#globstar) - * [baseNameMatch](#basenamematch) -* [FAQ](#faq) - * [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) - * [How to write patterns on Windows?](#how-to-write-patterns-on-windows) - * [Why are parentheses match wrong?](#why-are-parentheses-match-wrong) - * [How to exclude directory from reading?](#how-to-exclude-directory-from-reading) - * [How to use UNC path?](#how-to-use-unc-path) - * [Compatible with `node-glob`?](#compatible-with-node-glob) -* [Benchmarks](#benchmarks) - * [Server](#server) - * [Nettop](#nettop) -* [Changelog](#changelog) -* [License](#license) - -
- -## Highlights - -* Fast. Probably the fastest. -* Supports multiple and negative patterns. -* Synchronous, Promise and Stream API. -* Object mode. Can return more than just strings. -* Error-tolerant. - -## Old and modern mode - -This package works in two modes, depending on the environment in which it is used. - -* **Old mode**. Node.js below 10.10 or when the [`stats`](#stats) option is *enabled*. -* **Modern mode**. Node.js 10.10+ and the [`stats`](#stats) option is *disabled*. - -The modern mode is faster. Learn more about the [internal mechanism][nodelib_fs_scandir_old_and_modern_modern]. - -## Pattern syntax - -> :warning: Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. - -There is more than one form of syntax: basic and advanced. Below is a brief overview of the supported features. Also pay attention to our [FAQ](#faq). - -> :book: This package uses [`micromatch`][micromatch] as a library for pattern matching. - -### Basic syntax - -* An asterisk (`*`) — matches everything except slashes (path separators), hidden files (names starting with `.`). -* A double star or globstar (`**`) — matches zero or more directories. -* Question mark (`?`) – matches any single character except slashes (path separators). -* Sequence (`[seq]`) — matches any character in sequence. - -> :book: A few additional words about the [basic matching behavior][picomatch_matching_behavior]. - -Some examples: - -* `src/**/*.js` — matches all files in the `src` directory (any level of nesting) that have the `.js` extension. -* `src/*.??` — matches all files in the `src` directory (only first level of nesting) that have a two-character extension. -* `file-[01].js` — matches files: `file-0.js`, `file-1.js`. - -### Advanced syntax - -* [Escapes characters][micromatch_backslashes] (`\\`) — matching special characters (`$^*+?()[]`) as literals. -* [POSIX character classes][picomatch_posix_brackets] (`[[:digit:]]`). -* [Extended globs][micromatch_extglobs] (`?(pattern-list)`). -* [Bash style brace expansions][micromatch_braces] (`{}`). -* [Regexp character classes][micromatch_regex_character_classes] (`[1-5]`). -* [Regex groups][regular_expressions_brackets] (`(a|b)`). - -> :book: A few additional words about the [advanced matching behavior][micromatch_extended_globbing]. - -Some examples: - -* `src/**/*.{css,scss}` — matches all files in the `src` directory (any level of nesting) that have the `.css` or `.scss` extension. -* `file-[[:digit:]].js` — matches files: `file-0.js`, `file-1.js`, …, `file-9.js`. -* `file-{1..3}.js` — matches files: `file-1.js`, `file-2.js`, `file-3.js`. -* `file-(1|2)` — matches files: `file-1.js`, `file-2.js`. - -## Installation - -```console -npm install fast-glob -``` - -## API - -### Asynchronous - -```js -fg(patterns, [options]) -fg.async(patterns, [options]) -fg.glob(patterns, [options]) -``` - -Returns a `Promise` with an array of matching entries. - -```js -const fg = require('fast-glob'); - -const entries = await fg(['.editorconfig', '**/index.js'], { dot: true }); - -// ['.editorconfig', 'services/index.js'] -``` - -### Synchronous - -```js -fg.sync(patterns, [options]) -fg.globSync(patterns, [options]) -``` - -Returns an array of matching entries. 
- -```js -const fg = require('fast-glob'); - -const entries = fg.sync(['.editorconfig', '**/index.js'], { dot: true }); - -// ['.editorconfig', 'services/index.js'] -``` - -### Stream - -```js -fg.stream(patterns, [options]) -fg.globStream(patterns, [options]) -``` - -Returns a [`ReadableStream`][node_js_stream_readable_streams] when the `data` event will be emitted with matching entry. - -```js -const fg = require('fast-glob'); - -const stream = fg.stream(['.editorconfig', '**/index.js'], { dot: true }); - -for await (const entry of stream) { - // .editorconfig - // services/index.js -} -``` - -#### patterns - -* Required: `true` -* Type: `string | string[]` - -Any correct pattern(s). - -> :1234: [Pattern syntax](#pattern-syntax) -> -> :warning: This package does not respect the order of patterns. First, all the negative patterns are applied, and only then the positive patterns. If you want to get a certain order of records, use sorting or split calls. - -#### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -### Helpers - -#### `generateTasks(patterns, [options])` - -Returns the internal representation of patterns ([`Task`](./src/managers/tasks.ts) is a combining patterns by base directory). - -```js -fg.generateTasks('*'); - -[{ - base: '.', // Parent directory for all patterns inside this task - dynamic: true, // Dynamic or static patterns are in this task - patterns: ['*'], - positive: ['*'], - negative: [] -}] -``` - -##### patterns - -* Required: `true` -* Type: `string | string[]` - -Any correct pattern(s). - -##### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -#### `isDynamicPattern(pattern, [options])` - -Returns `true` if the passed pattern is a dynamic pattern. - -> :1234: [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) - -```js -fg.isDynamicPattern('*'); // true -fg.isDynamicPattern('abc'); // false -``` - -##### pattern - -* Required: `true` -* Type: `string` - -Any correct pattern. - -##### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -#### `escapePath(path)` - -Returns the path with escaped special characters depending on the platform. - -* Posix: - * `*?|(){}[]`; - * `!` at the beginning of line; - * `@+!` before the opening parenthesis; - * `\\` before non-special characters; -* Windows: - * `(){}[]` - * `!` at the beginning of line; - * `@+!` before the opening parenthesis; - * Characters like `*?|` cannot be used in the path ([windows_naming_conventions][windows_naming_conventions]), so they will not be escaped; - -```js -fg.escapePath('!abc'); -// \\!abc -fg.escapePath('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac' -// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac - -fg.posix.escapePath('C:\\Program Files (x86)\\**\\*'); -// C:\\\\Program Files \\(x86\\)\\*\\*\\* -fg.win32.escapePath('C:\\Program Files (x86)\\**\\*'); -// Windows: C:\\Program Files \\(x86\\)\\**\\* -``` - -#### `convertPathToPattern(path)` - -Converts a path to a pattern depending on the platform, including special character escaping. - -* Posix. Works similarly to the `fg.posix.escapePath` method. -* Windows. Works similarly to the `fg.win32.escapePath` method, additionally converting backslashes to forward slashes in cases where they are not escape characters (`!()+@{}[]`). 
- -```js -fg.convertPathToPattern('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac'; -// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac - -fg.convertPathToPattern('C:/Program Files (x86)/**/*'); -// Posix: C:/Program Files \\(x86\\)/\\*\\*/\\* -// Windows: C:/Program Files \\(x86\\)/**/* - -fg.convertPathToPattern('C:\\Program Files (x86)\\**\\*'); -// Posix: C:\\\\Program Files \\(x86\\)\\*\\*\\* -// Windows: C:/Program Files \\(x86\\)/**/* - -fg.posix.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; -// Posix: \\\\\\?\\\\c:\\\\Program Files \\(x86\\)/**/* (broken pattern) -fg.win32.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; -// Windows: //?/c:/Program Files \\(x86\\)/**/* -``` - -## Options - -### Common options - -#### concurrency - -* Type: `number` -* Default: `os.cpus().length` - -Specifies the maximum number of concurrent requests from a reader to read directories. - -> :book: The higher the number, the higher the performance and load on the file system. If you want to read in quiet mode, set the value to a comfortable number or `1`. - -
- -More details - -In Node, there are [two types of threads][nodejs_thread_pool]: Event Loop (code) and a Thread Pool (fs, dns, …). The thread pool size controlled by the `UV_THREADPOOL_SIZE` environment variable. Its default size is 4 ([documentation][libuv_thread_pool]). The pool is one for all tasks within a single Node process. - -Any code can make 4 real concurrent accesses to the file system. The rest of the FS requests will wait in the queue. - -> :book: Each new instance of FG in the same Node process will use the same Thread pool. - -But this package also has the `concurrency` option. This option allows you to control the number of concurrent accesses to the FS at the package level. By default, this package has a value equal to the number of cores available for the current Node process. This allows you to set a value smaller than the pool size (`concurrency: 1`) or, conversely, to prepare tasks for the pool queue more quickly (`concurrency: Number.POSITIVE_INFINITY`). - -So, in fact, this package can **only make 4 concurrent requests to the FS**. You can increase this value by using an environment variable (`UV_THREADPOOL_SIZE`), but in practice this does not give a multiple advantage. - -
- -#### cwd - -* Type: `string` -* Default: `process.cwd()` - -The current working directory in which to search. - -#### deep - -* Type: `number` -* Default: `Infinity` - -Specifies the maximum depth of a read directory relative to the start directory. - -For example, you have the following tree: - -```js -dir/ -└── one/ // 1 - └── two/ // 2 - └── file.js // 3 -``` - -```js -// With base directory -fg.sync('dir/**', { onlyFiles: false, deep: 1 }); // ['dir/one'] -fg.sync('dir/**', { onlyFiles: false, deep: 2 }); // ['dir/one', 'dir/one/two'] - -// With cwd option -fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 1 }); // ['one'] -fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 2 }); // ['one', 'one/two'] -``` - -> :book: If you specify a pattern with some base directory, this directory will not participate in the calculation of the depth of the found directories. Think of it as a [`cwd`](#cwd) option. - -#### followSymbolicLinks - -* Type: `boolean` -* Default: `true` - -Indicates whether to traverse descendants of symbolic link directories when expanding `**` patterns. - -> :book: Note that this option does not affect the base directory of the pattern. For example, if `./a` is a symlink to directory `./b` and you specified `['./a**', './b/**']` patterns, then directory `./a` will still be read. - -> :book: If the [`stats`](#stats) option is specified, the information about the symbolic link (`fs.lstat`) will be replaced with information about the entry (`fs.stat`) behind it. - -#### fs - -* Type: `FileSystemAdapter` -* Default: `fs.*` - -Custom implementation of methods for working with the file system. - -```ts -export interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; - readdir?: typeof fs.readdir; - readdirSync?: typeof fs.readdirSync; -} -``` - -#### ignore - -* Type: `string[]` -* Default: `[]` - -An array of glob patterns to exclude matches. This is an alternative way to use negative patterns. - -```js -dir/ -├── package-lock.json -└── package.json -``` - -```js -fg.sync(['*.json', '!package-lock.json']); // ['package.json'] -fg.sync('*.json', { ignore: ['package-lock.json'] }); // ['package.json'] -``` - -#### suppressErrors - -* Type: `boolean` -* Default: `false` - -By default this package suppress only `ENOENT` errors. Set to `true` to suppress any error. - -> :book: Can be useful when the directory has entries with a special level of access. - -#### throwErrorOnBrokenSymbolicLink - -* Type: `boolean` -* Default: `false` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -> :book: This option has no effect on errors when reading the symbolic link directory. - -### Output control - -#### absolute - -* Type: `boolean` -* Default: `false` - -Return the absolute path for entries. - -```js -fg.sync('*.js', { absolute: false }); // ['index.js'] -fg.sync('*.js', { absolute: true }); // ['/home/user/index.js'] -``` - -> :book: This option is required if you want to use negative patterns with absolute path, for example, `!${__dirname}/*.js`. - -#### markDirectories - -* Type: `boolean` -* Default: `false` - -Mark the directory path with the final slash. 
- -```js -fg.sync('*', { onlyFiles: false, markDirectories: false }); // ['index.js', 'controllers'] -fg.sync('*', { onlyFiles: false, markDirectories: true }); // ['index.js', 'controllers/'] -``` - -#### objectMode - -* Type: `boolean` -* Default: `false` - -Returns objects (instead of strings) describing entries. - -```js -fg.sync('*', { objectMode: false }); // ['src/index.js'] -fg.sync('*', { objectMode: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: }] -``` - -The object has the following fields: - -* name (`string`) — the last part of the path (basename) -* path (`string`) — full path relative to the pattern base directory -* dirent ([`fs.Dirent`][node_js_fs_class_fs_dirent]) — instance of `fs.Dirent` - -> :book: An object is an internal representation of entry, so getting it does not affect performance. - -#### onlyDirectories - -* Type: `boolean` -* Default: `false` - -Return only directories. - -```js -fg.sync('*', { onlyDirectories: false }); // ['index.js', 'src'] -fg.sync('*', { onlyDirectories: true }); // ['src'] -``` - -> :book: If `true`, the [`onlyFiles`](#onlyfiles) option is automatically `false`. - -#### onlyFiles - -* Type: `boolean` -* Default: `true` - -Return only files. - -```js -fg.sync('*', { onlyFiles: false }); // ['index.js', 'src'] -fg.sync('*', { onlyFiles: true }); // ['index.js'] -``` - -#### stats - -* Type: `boolean` -* Default: `false` - -Enables an [object mode](#objectmode) with an additional field: - -* stats ([`fs.Stats`][node_js_fs_class_fs_stats]) — instance of `fs.Stats` - -```js -fg.sync('*', { stats: false }); // ['src/index.js'] -fg.sync('*', { stats: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: , stats: }] -``` - -> :book: Returns `fs.stat` instead of `fs.lstat` for symbolic links when the [`followSymbolicLinks`](#followsymboliclinks) option is specified. -> -> :warning: Unlike [object mode](#objectmode) this mode requires additional calls to the file system. On average, this mode is slower at least twice. See [old and modern mode](#old-and-modern-mode) for more details. - -#### unique - -* Type: `boolean` -* Default: `true` - -Ensures that the returned entries are unique. - -```js -fg.sync(['*.json', 'package.json'], { unique: false }); // ['package.json', 'package.json'] -fg.sync(['*.json', 'package.json'], { unique: true }); // ['package.json'] -``` - -If `true` and similar entries are found, the result is the first found. - -### Matching control - -#### braceExpansion - -* Type: `boolean` -* Default: `true` - -Enables Bash-like brace expansion. - -> :1234: [Syntax description][bash_hackers_syntax_expansion_brace] or more [detailed description][micromatch_braces]. - -```js -dir/ -├── abd -├── acd -└── a{b,c}d -``` - -```js -fg.sync('a{b,c}d', { braceExpansion: false }); // ['a{b,c}d'] -fg.sync('a{b,c}d', { braceExpansion: true }); // ['abd', 'acd'] -``` - -#### caseSensitiveMatch - -* Type: `boolean` -* Default: `true` - -Enables a [case-sensitive][wikipedia_case_sensitivity] mode for matching files. - -```js -dir/ -├── file.txt -└── File.txt -``` - -```js -fg.sync('file.txt', { caseSensitiveMatch: false }); // ['file.txt', 'File.txt'] -fg.sync('file.txt', { caseSensitiveMatch: true }); // ['file.txt'] -``` - -#### dot - -* Type: `boolean` -* Default: `false` - -Allow patterns to match entries that begin with a period (`.`). - -> :book: Note that an explicit dot in a portion of the pattern will always match dot files. 
- -```js -dir/ -├── .editorconfig -└── package.json -``` - -```js -fg.sync('*', { dot: false }); // ['package.json'] -fg.sync('*', { dot: true }); // ['.editorconfig', 'package.json'] -``` - -#### extglob - -* Type: `boolean` -* Default: `true` - -Enables Bash-like `extglob` functionality. - -> :1234: [Syntax description][micromatch_extglobs]. - -```js -dir/ -├── README.md -└── package.json -``` - -```js -fg.sync('*.+(json|md)', { extglob: false }); // [] -fg.sync('*.+(json|md)', { extglob: true }); // ['README.md', 'package.json'] -``` - -#### globstar - -* Type: `boolean` -* Default: `true` - -Enables recursively repeats a pattern containing `**`. If `false`, `**` behaves exactly like `*`. - -```js -dir/ -└── a - └── b -``` - -```js -fg.sync('**', { onlyFiles: false, globstar: false }); // ['a'] -fg.sync('**', { onlyFiles: false, globstar: true }); // ['a', 'a/b'] -``` - -#### baseNameMatch - -* Type: `boolean` -* Default: `false` - -If set to `true`, then patterns without slashes will be matched against the basename of the path if it contains slashes. - -```js -dir/ -└── one/ - └── file.md -``` - -```js -fg.sync('*.md', { baseNameMatch: false }); // [] -fg.sync('*.md', { baseNameMatch: true }); // ['one/file.md'] -``` - -## FAQ - -## What is a static or dynamic pattern? - -All patterns can be divided into two types: - -* **static**. A pattern is considered static if it can be used to get an entry on the file system without using matching mechanisms. For example, the `file.js` pattern is a static pattern because we can just verify that it exists on the file system. -* **dynamic**. A pattern is considered dynamic if it cannot be used directly to find occurrences without using a matching mechanisms. For example, the `*` pattern is a dynamic pattern because we cannot use this pattern directly. - -A pattern is considered dynamic if it contains the following characters (`…` — any characters or their absence) or options: - -* The [`caseSensitiveMatch`](#casesensitivematch) option is disabled -* `\\` (the escape character) -* `*`, `?`, `!` (at the beginning of line) -* `[…]` -* `(…|…)` -* `@(…)`, `!(…)`, `*(…)`, `?(…)`, `+(…)` (respects the [`extglob`](#extglob) option) -* `{…,…}`, `{…..…}` (respects the [`braceExpansion`](#braceexpansion) option) - -## How to write patterns on Windows? - -Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. With the [`cwd`](#cwd) option use a convenient format. - -**Bad** - -```ts -[ - 'directory\\*', - path.join(process.cwd(), '**') -] -``` - -**Good** - -```ts -[ - 'directory/*', - fg.convertPathToPattern(process.cwd()) + '/**' -] -``` - -> :book: Use the [`.convertPathToPattern`](#convertpathtopatternpath) package to convert Windows-style path to a Unix-style path. - -Read more about [matching with backslashes][micromatch_backslashes]. - -## Why are parentheses match wrong? - -```js -dir/ -└── (special-*file).txt -``` - -```js -fg.sync(['(special-*file).txt']) // [] -``` - -Refers to Bash. You need to escape special characters: - -```js -fg.sync(['\\(special-*file\\).txt']) // ['(special-*file).txt'] -``` - -Read more about [matching special characters as literals][picomatch_matching_special_characters_as_literals]. Or use the [`.escapePath`](#escapepathpath). - -## How to exclude directory from reading? - -You can use a negative pattern like this: `!**/node_modules` or `!**/node_modules/**`. Also you can use [`ignore`](#ignore) option. Just look at the example below. 
- -```js -first/ -├── file.md -└── second/ - └── file.txt -``` - -If you don't want to read the `second` directory, you must write the following pattern: `!**/second` or `!**/second/**`. - -```js -fg.sync(['**/*.md', '!**/second']); // ['first/file.md'] -fg.sync(['**/*.md'], { ignore: ['**/second/**'] }); // ['first/file.md'] -``` - -> :warning: When you write `!**/second/**/*` it means that the directory will be **read**, but all the entries will not be included in the results. - -You have to understand that if you write the pattern to exclude directories, then the directory will not be read under any circumstances. - -## How to use UNC path? - -You cannot use [Uniform Naming Convention (UNC)][unc_path] paths as patterns (due to syntax) directly, but you can use them as [`cwd`](#cwd) directory or use the `fg.convertPathToPattern` method. - -```ts -// cwd -fg.sync('*', { cwd: '\\\\?\\C:\\Python27' /* or //?/C:/Python27 */ }); -fg.sync('Python27/*', { cwd: '\\\\?\\C:\\' /* or //?/C:/ */ }); - -// .convertPathToPattern -fg.sync(fg.convertPathToPattern('\\\\?\\c:\\Python27') + '/*'); -``` - -## Compatible with `node-glob`? - -| node-glob | fast-glob | -| :----------: | :-------: | -| `cwd` | [`cwd`](#cwd) | -| `root` | – | -| `dot` | [`dot`](#dot) | -| `nomount` | – | -| `mark` | [`markDirectories`](#markdirectories) | -| `nosort` | – | -| `nounique` | [`unique`](#unique) | -| `nobrace` | [`braceExpansion`](#braceexpansion) | -| `noglobstar` | [`globstar`](#globstar) | -| `noext` | [`extglob`](#extglob) | -| `nocase` | [`caseSensitiveMatch`](#casesensitivematch) | -| `matchBase` | [`baseNameMatch`](#basenamematch) | -| `nodir` | [`onlyFiles`](#onlyfiles) | -| `ignore` | [`ignore`](#ignore) | -| `follow` | [`followSymbolicLinks`](#followsymboliclinks) | -| `realpath` | – | -| `absolute` | [`absolute`](#absolute) | - -## Benchmarks - -You can see results [here](https://github.com/mrmlnc/fast-glob/actions/workflows/benchmark.yml?query=branch%3Amaster) for every commit into the `main` branch. - -* **Product benchmark** – comparison with the main competitors. -* **Regress benchmark** – regression between the current version and the version from the npm registry. - -## Changelog - -See the [Releases section of our GitHub project][github_releases] for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
- -[bash_hackers_syntax_expansion_brace]: https://wiki.bash-hackers.org/syntax/expansion/brace -[github_releases]: https://github.com/mrmlnc/fast-glob/releases -[glob_definition]: https://en.wikipedia.org/wiki/Glob_(programming) -[glob_linux_man]: http://man7.org/linux/man-pages/man3/glob.3.html -[micromatch_backslashes]: https://github.com/micromatch/micromatch#backslashes -[micromatch_braces]: https://github.com/micromatch/braces -[micromatch_extended_globbing]: https://github.com/micromatch/micromatch#extended-globbing -[micromatch_extglobs]: https://github.com/micromatch/micromatch#extglobs -[micromatch_regex_character_classes]: https://github.com/micromatch/micromatch#regex-character-classes -[micromatch]: https://github.com/micromatch/micromatch -[node_js_fs_class_fs_dirent]: https://nodejs.org/api/fs.html#fs_class_fs_dirent -[node_js_fs_class_fs_stats]: https://nodejs.org/api/fs.html#fs_class_fs_stats -[node_js_stream_readable_streams]: https://nodejs.org/api/stream.html#stream_readable_streams -[node_js]: https://nodejs.org/en -[nodelib_fs_scandir_old_and_modern_modern]: https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode -[npm_normalize_path]: https://www.npmjs.com/package/normalize-path -[npm_unixify]: https://www.npmjs.com/package/unixify -[picomatch_matching_behavior]: https://github.com/micromatch/picomatch#matching-behavior-vs-bash -[picomatch_matching_special_characters_as_literals]: https://github.com/micromatch/picomatch#matching-special-characters-as-literals -[picomatch_posix_brackets]: https://github.com/micromatch/picomatch#posix-brackets -[regular_expressions_brackets]: https://www.regular-expressions.info/brackets.html -[unc_path]: https://learn.microsoft.com/openspecs/windows_protocols/ms-dtyp/62e862f4-2a51-452e-8eeb-dc4ff5ee33cc -[wikipedia_case_sensitivity]: https://en.wikipedia.org/wiki/Case_sensitivity -[nodejs_thread_pool]: https://nodejs.org/en/docs/guides/dont-block-the-event-loop -[libuv_thread_pool]: http://docs.libuv.org/en/v1.x/threadpool.html -[windows_naming_conventions]: https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions diff --git a/node_modules/fast-glob/out/index.d.ts b/node_modules/fast-glob/out/index.d.ts deleted file mode 100644 index 46823bb..0000000 --- a/node_modules/fast-glob/out/index.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/// -import * as taskManager from './managers/tasks'; -import { Options as OptionsInternal } from './settings'; -import { Entry as EntryInternal, FileSystemAdapter as FileSystemAdapterInternal, Pattern as PatternInternal } from './types'; -type EntryObjectModePredicate = { - [TKey in keyof Pick]-?: true; -}; -type EntryStatsPredicate = { - [TKey in keyof Pick]-?: true; -}; -type EntryObjectPredicate = EntryObjectModePredicate | EntryStatsPredicate; -declare function FastGlob(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): Promise; -declare function FastGlob(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Promise; -declare namespace FastGlob { - type Options = OptionsInternal; - type Entry = EntryInternal; - type Task = taskManager.Task; - type Pattern = PatternInternal; - type FileSystemAdapter = FileSystemAdapterInternal; - const glob: typeof FastGlob; - const globSync: typeof sync; - const globStream: typeof stream; - const async: typeof FastGlob; - function sync(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): 
EntryInternal[]; - function sync(source: PatternInternal | PatternInternal[], options?: OptionsInternal): string[]; - function stream(source: PatternInternal | PatternInternal[], options?: OptionsInternal): NodeJS.ReadableStream; - function generateTasks(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Task[]; - function isDynamicPattern(source: PatternInternal, options?: OptionsInternal): boolean; - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - namespace posix { - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - } - namespace win32 { - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - } -} -export = FastGlob; diff --git a/node_modules/fast-glob/out/index.js b/node_modules/fast-glob/out/index.js deleted file mode 100644 index 90365d4..0000000 --- a/node_modules/fast-glob/out/index.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; -const taskManager = require("./managers/tasks"); -const async_1 = require("./providers/async"); -const stream_1 = require("./providers/stream"); -const sync_1 = require("./providers/sync"); -const settings_1 = require("./settings"); -const utils = require("./utils"); -async function FastGlob(source, options) { - assertPatternsInput(source); - const works = getWorks(source, async_1.default, options); - const result = await Promise.all(works); - return utils.array.flatten(result); -} -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -(function (FastGlob) { - FastGlob.glob = FastGlob; - FastGlob.globSync = sync; - FastGlob.globStream = stream; - FastGlob.async = FastGlob; - function sync(source, options) { - assertPatternsInput(source); - const works = getWorks(source, sync_1.default, options); - return utils.array.flatten(works); - } - FastGlob.sync = sync; - function stream(source, options) { - assertPatternsInput(source); - const works = getWorks(source, stream_1.default, options); - /** - * The stream returned by the provider cannot work with an asynchronous iterator. - * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. - * This affects performance (+25%). I don't see best solution right now. 
- */ - return utils.stream.merge(works); - } - FastGlob.stream = stream; - function generateTasks(source, options) { - assertPatternsInput(source); - const patterns = [].concat(source); - const settings = new settings_1.default(options); - return taskManager.generate(patterns, settings); - } - FastGlob.generateTasks = generateTasks; - function isDynamicPattern(source, options) { - assertPatternsInput(source); - const settings = new settings_1.default(options); - return utils.pattern.isDynamicPattern(source, settings); - } - FastGlob.isDynamicPattern = isDynamicPattern; - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escape(source); - } - FastGlob.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertPathToPattern(source); - } - FastGlob.convertPathToPattern = convertPathToPattern; - let posix; - (function (posix) { - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escapePosixPath(source); - } - posix.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertPosixPathToPattern(source); - } - posix.convertPathToPattern = convertPathToPattern; - })(posix = FastGlob.posix || (FastGlob.posix = {})); - let win32; - (function (win32) { - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escapeWindowsPath(source); - } - win32.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertWindowsPathToPattern(source); - } - win32.convertPathToPattern = convertPathToPattern; - })(win32 = FastGlob.win32 || (FastGlob.win32 = {})); -})(FastGlob || (FastGlob = {})); -function getWorks(source, _Provider, options) { - const patterns = [].concat(source); - const settings = new settings_1.default(options); - const tasks = taskManager.generate(patterns, settings); - const provider = new _Provider(settings); - return tasks.map(provider.read, provider); -} -function assertPatternsInput(input) { - const source = [].concat(input); - const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); - if (!isValidSource) { - throw new TypeError('Patterns must be a string (non empty) or an array of strings'); - } -} -module.exports = FastGlob; diff --git a/node_modules/fast-glob/out/managers/tasks.d.ts b/node_modules/fast-glob/out/managers/tasks.d.ts deleted file mode 100644 index 59d2c42..0000000 --- a/node_modules/fast-glob/out/managers/tasks.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import Settings from '../settings'; -import { Pattern, PatternsGroup } from '../types'; -export type Task = { - base: string; - dynamic: boolean; - patterns: Pattern[]; - positive: Pattern[]; - negative: Pattern[]; -}; -export declare function generate(input: Pattern[], settings: Settings): Task[]; -/** - * Returns tasks grouped by basic pattern directories. - * - * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. - * This is necessary because directory traversal starts at the base directory and goes deeper. 
- */ -export declare function convertPatternsToTasks(positive: Pattern[], negative: Pattern[], dynamic: boolean): Task[]; -export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; -export declare function getNegativePatternsAsPositive(patterns: Pattern[], ignore: Pattern[]): Pattern[]; -export declare function groupPatternsByBaseDirectory(patterns: Pattern[]): PatternsGroup; -export declare function convertPatternGroupsToTasks(positive: PatternsGroup, negative: Pattern[], dynamic: boolean): Task[]; -export declare function convertPatternGroupToTask(base: string, positive: Pattern[], negative: Pattern[], dynamic: boolean): Task; diff --git a/node_modules/fast-glob/out/managers/tasks.js b/node_modules/fast-glob/out/managers/tasks.js deleted file mode 100644 index 335a765..0000000 --- a/node_modules/fast-glob/out/managers/tasks.js +++ /dev/null @@ -1,110 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; -const utils = require("../utils"); -function generate(input, settings) { - const patterns = processPatterns(input, settings); - const ignore = processPatterns(settings.ignore, settings); - const positivePatterns = getPositivePatterns(patterns); - const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); - const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); - const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); - const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); - const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); - return staticTasks.concat(dynamicTasks); -} -exports.generate = generate; -function processPatterns(input, settings) { - let patterns = input; - /** - * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry - * and some problems with the micromatch package (see fast-glob issues: #365, #394). - * - * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown - * in matching in the case of a large set of patterns after expansion. - */ - if (settings.braceExpansion) { - patterns = utils.pattern.expandPatternsWithBraceExpansion(patterns); - } - /** - * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used - * at any nesting level. - * - * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change - * the pattern in the filter before creating a regular expression. There is no need to change the patterns - * in the application. Only on the input. - */ - if (settings.baseNameMatch) { - patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); - } - /** - * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. - */ - return patterns.map((pattern) => utils.pattern.removeDuplicateSlashes(pattern)); -} -/** - * Returns tasks grouped by basic pattern directories. - * - * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. 
- * This is necessary because directory traversal starts at the base directory and goes deeper. - */ -function convertPatternsToTasks(positive, negative, dynamic) { - const tasks = []; - const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); - const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); - const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); - const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); - tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); - /* - * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory - * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. - */ - if ('.' in insideCurrentDirectoryGroup) { - tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); - } - else { - tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); - } - return tasks; -} -exports.convertPatternsToTasks = convertPatternsToTasks; -function getPositivePatterns(patterns) { - return utils.pattern.getPositivePatterns(patterns); -} -exports.getPositivePatterns = getPositivePatterns; -function getNegativePatternsAsPositive(patterns, ignore) { - const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); - const positive = negative.map(utils.pattern.convertToPositivePattern); - return positive; -} -exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; -function groupPatternsByBaseDirectory(patterns) { - const group = {}; - return patterns.reduce((collection, pattern) => { - const base = utils.pattern.getBaseDirectory(pattern); - if (base in collection) { - collection[base].push(pattern); - } - else { - collection[base] = [pattern]; - } - return collection; - }, group); -} -exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; -function convertPatternGroupsToTasks(positive, negative, dynamic) { - return Object.keys(positive).map((base) => { - return convertPatternGroupToTask(base, positive[base], negative, dynamic); - }); -} -exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; -function convertPatternGroupToTask(base, positive, negative, dynamic) { - return { - dynamic, - positive, - negative, - base, - patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) - }; -} -exports.convertPatternGroupToTask = convertPatternGroupToTask; diff --git a/node_modules/fast-glob/out/providers/async.d.ts b/node_modules/fast-glob/out/providers/async.d.ts deleted file mode 100644 index 2742616..0000000 --- a/node_modules/fast-glob/out/providers/async.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Task } from '../managers/tasks'; -import { Entry, EntryItem, ReaderOptions } from '../types'; -import ReaderAsync from '../readers/async'; -import Provider from './provider'; -export default class ProviderAsync extends Provider> { - protected _reader: ReaderAsync; - read(task: Task): Promise; - api(root: string, task: Task, options: ReaderOptions): Promise; -} diff --git a/node_modules/fast-glob/out/providers/async.js b/node_modules/fast-glob/out/providers/async.js deleted file mode 100644 index 0c5286e..0000000 --- a/node_modules/fast-glob/out/providers/async.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; 
-Object.defineProperty(exports, "__esModule", { value: true }); -const async_1 = require("../readers/async"); -const provider_1 = require("./provider"); -class ProviderAsync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new async_1.default(this._settings); - } - async read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = await this.api(root, task, options); - return entries.map((entry) => options.transform(entry)); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderAsync; diff --git a/node_modules/fast-glob/out/providers/filters/deep.d.ts b/node_modules/fast-glob/out/providers/filters/deep.d.ts deleted file mode 100644 index 377fab8..0000000 --- a/node_modules/fast-glob/out/providers/filters/deep.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { MicromatchOptions, EntryFilterFunction, Pattern } from '../../types'; -import Settings from '../../settings'; -export default class DeepFilter { - private readonly _settings; - private readonly _micromatchOptions; - constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); - getFilter(basePath: string, positive: Pattern[], negative: Pattern[]): EntryFilterFunction; - private _getMatcher; - private _getNegativePatternsRe; - private _filter; - private _isSkippedByDeep; - private _getEntryLevel; - private _isSkippedSymbolicLink; - private _isSkippedByPositivePatterns; - private _isSkippedByNegativePatterns; -} diff --git a/node_modules/fast-glob/out/providers/filters/deep.js b/node_modules/fast-glob/out/providers/filters/deep.js deleted file mode 100644 index 644bf41..0000000 --- a/node_modules/fast-glob/out/providers/filters/deep.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -const partial_1 = require("../matchers/partial"); -class DeepFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - } - getFilter(basePath, positive, negative) { - const matcher = this._getMatcher(positive); - const negativeRe = this._getNegativePatternsRe(negative); - return (entry) => this._filter(basePath, entry, matcher, negativeRe); - } - _getMatcher(patterns) { - return new partial_1.default(patterns, this._settings, this._micromatchOptions); - } - _getNegativePatternsRe(patterns) { - const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); - return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); - } - _filter(basePath, entry, matcher, negativeRe) { - if (this._isSkippedByDeep(basePath, entry.path)) { - return false; - } - if (this._isSkippedSymbolicLink(entry)) { - return false; - } - const filepath = utils.path.removeLeadingDotSegment(entry.path); - if (this._isSkippedByPositivePatterns(filepath, matcher)) { - return false; - } - return this._isSkippedByNegativePatterns(filepath, negativeRe); - } - _isSkippedByDeep(basePath, entryPath) { - /** - * Avoid unnecessary depth calculations when it doesn't matter. 
- */ - if (this._settings.deep === Infinity) { - return false; - } - return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; - } - _getEntryLevel(basePath, entryPath) { - const entryPathDepth = entryPath.split('/').length; - if (basePath === '') { - return entryPathDepth; - } - const basePathDepth = basePath.split('/').length; - return entryPathDepth - basePathDepth; - } - _isSkippedSymbolicLink(entry) { - return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); - } - _isSkippedByPositivePatterns(entryPath, matcher) { - return !this._settings.baseNameMatch && !matcher.match(entryPath); - } - _isSkippedByNegativePatterns(entryPath, patternsRe) { - return !utils.pattern.matchAny(entryPath, patternsRe); - } -} -exports.default = DeepFilter; diff --git a/node_modules/fast-glob/out/providers/filters/entry.d.ts b/node_modules/fast-glob/out/providers/filters/entry.d.ts deleted file mode 100644 index ee71281..0000000 --- a/node_modules/fast-glob/out/providers/filters/entry.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import Settings from '../../settings'; -import { EntryFilterFunction, MicromatchOptions, Pattern } from '../../types'; -export default class EntryFilter { - private readonly _settings; - private readonly _micromatchOptions; - readonly index: Map; - constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); - getFilter(positive: Pattern[], negative: Pattern[]): EntryFilterFunction; - private _filter; - private _isDuplicateEntry; - private _createIndexRecord; - private _onlyFileFilter; - private _onlyDirectoryFilter; - private _isSkippedByAbsoluteNegativePatterns; - private _isMatchToPatterns; -} diff --git a/node_modules/fast-glob/out/providers/filters/entry.js b/node_modules/fast-glob/out/providers/filters/entry.js deleted file mode 100644 index 361a7b4..0000000 --- a/node_modules/fast-glob/out/providers/filters/entry.js +++ /dev/null @@ -1,63 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class EntryFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this.index = new Map(); - } - getFilter(positive, negative) { - const positiveRe = utils.pattern.convertPatternsToRe(positive, this._micromatchOptions); - const negativeRe = utils.pattern.convertPatternsToRe(negative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })); - return (entry) => this._filter(entry, positiveRe, negativeRe); - } - _filter(entry, positiveRe, negativeRe) { - const filepath = utils.path.removeLeadingDotSegment(entry.path); - if (this._settings.unique && this._isDuplicateEntry(filepath)) { - return false; - } - if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { - return false; - } - if (this._isSkippedByAbsoluteNegativePatterns(filepath, negativeRe)) { - return false; - } - const isDirectory = entry.dirent.isDirectory(); - const isMatched = this._isMatchToPatterns(filepath, positiveRe, isDirectory) && !this._isMatchToPatterns(filepath, negativeRe, isDirectory); - if (this._settings.unique && isMatched) { - this._createIndexRecord(filepath); - } - return isMatched; - } - _isDuplicateEntry(filepath) { - return this.index.has(filepath); - } - _createIndexRecord(filepath) { - this.index.set(filepath, undefined); - } - _onlyFileFilter(entry) { - return this._settings.onlyFiles && !entry.dirent.isFile(); - } - _onlyDirectoryFilter(entry) { - return this._settings.onlyDirectories 
&& !entry.dirent.isDirectory(); - } - _isSkippedByAbsoluteNegativePatterns(entryPath, patternsRe) { - if (!this._settings.absolute) { - return false; - } - const fullpath = utils.path.makeAbsolute(this._settings.cwd, entryPath); - return utils.pattern.matchAny(fullpath, patternsRe); - } - _isMatchToPatterns(filepath, patternsRe, isDirectory) { - // Trying to match files and directories by patterns. - const isMatched = utils.pattern.matchAny(filepath, patternsRe); - // A pattern with a trailling slash can be used for directory matching. - // To apply such pattern, we need to add a tralling slash to the path. - if (!isMatched && isDirectory) { - return utils.pattern.matchAny(filepath + '/', patternsRe); - } - return isMatched; - } -} -exports.default = EntryFilter; diff --git a/node_modules/fast-glob/out/providers/filters/error.d.ts b/node_modules/fast-glob/out/providers/filters/error.d.ts deleted file mode 100644 index 170eb25..0000000 --- a/node_modules/fast-glob/out/providers/filters/error.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import Settings from '../../settings'; -import { ErrorFilterFunction } from '../../types'; -export default class ErrorFilter { - private readonly _settings; - constructor(_settings: Settings); - getFilter(): ErrorFilterFunction; - private _isNonFatalError; -} diff --git a/node_modules/fast-glob/out/providers/filters/error.js b/node_modules/fast-glob/out/providers/filters/error.js deleted file mode 100644 index 1c6f241..0000000 --- a/node_modules/fast-glob/out/providers/filters/error.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class ErrorFilter { - constructor(_settings) { - this._settings = _settings; - } - getFilter() { - return (error) => this._isNonFatalError(error); - } - _isNonFatalError(error) { - return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; - } -} -exports.default = ErrorFilter; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.d.ts b/node_modules/fast-glob/out/providers/matchers/matcher.d.ts deleted file mode 100644 index d04c232..0000000 --- a/node_modules/fast-glob/out/providers/matchers/matcher.d.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { Pattern, MicromatchOptions, PatternRe } from '../../types'; -import Settings from '../../settings'; -export type PatternSegment = StaticPatternSegment | DynamicPatternSegment; -type StaticPatternSegment = { - dynamic: false; - pattern: Pattern; -}; -type DynamicPatternSegment = { - dynamic: true; - pattern: Pattern; - patternRe: PatternRe; -}; -export type PatternSection = PatternSegment[]; -export type PatternInfo = { - /** - * Indicates that the pattern has a globstar (more than a single section). 
- */ - complete: boolean; - pattern: Pattern; - segments: PatternSegment[]; - sections: PatternSection[]; -}; -export default abstract class Matcher { - private readonly _patterns; - private readonly _settings; - private readonly _micromatchOptions; - protected readonly _storage: PatternInfo[]; - constructor(_patterns: Pattern[], _settings: Settings, _micromatchOptions: MicromatchOptions); - private _fillStorage; - private _getPatternSegments; - private _splitSegmentsIntoSections; -} -export {}; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.js b/node_modules/fast-glob/out/providers/matchers/matcher.js deleted file mode 100644 index eae67c9..0000000 --- a/node_modules/fast-glob/out/providers/matchers/matcher.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class Matcher { - constructor(_patterns, _settings, _micromatchOptions) { - this._patterns = _patterns; - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this._storage = []; - this._fillStorage(); - } - _fillStorage() { - for (const pattern of this._patterns) { - const segments = this._getPatternSegments(pattern); - const sections = this._splitSegmentsIntoSections(segments); - this._storage.push({ - complete: sections.length <= 1, - pattern, - segments, - sections - }); - } - } - _getPatternSegments(pattern) { - const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); - return parts.map((part) => { - const dynamic = utils.pattern.isDynamicPattern(part, this._settings); - if (!dynamic) { - return { - dynamic: false, - pattern: part - }; - } - return { - dynamic: true, - pattern: part, - patternRe: utils.pattern.makeRe(part, this._micromatchOptions) - }; - }); - } - _splitSegmentsIntoSections(segments) { - return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); - } -} -exports.default = Matcher; diff --git a/node_modules/fast-glob/out/providers/matchers/partial.d.ts b/node_modules/fast-glob/out/providers/matchers/partial.d.ts deleted file mode 100644 index 91520f6..0000000 --- a/node_modules/fast-glob/out/providers/matchers/partial.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import Matcher from './matcher'; -export default class PartialMatcher extends Matcher { - match(filepath: string): boolean; -} diff --git a/node_modules/fast-glob/out/providers/matchers/partial.js b/node_modules/fast-glob/out/providers/matchers/partial.js deleted file mode 100644 index 1dfffeb..0000000 --- a/node_modules/fast-glob/out/providers/matchers/partial.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const matcher_1 = require("./matcher"); -class PartialMatcher extends matcher_1.default { - match(filepath) { - const parts = filepath.split('/'); - const levels = parts.length; - const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); - for (const pattern of patterns) { - const section = pattern.sections[0]; - /** - * In this case, the pattern has a globstar and we must read all directories unconditionally, - * but only if the level has reached the end of the first group. 
- * - * fixtures/{a,b}/** - * ^ true/false ^ always true - */ - if (!pattern.complete && levels > section.length) { - return true; - } - const match = parts.every((part, index) => { - const segment = pattern.segments[index]; - if (segment.dynamic && segment.patternRe.test(part)) { - return true; - } - if (!segment.dynamic && segment.pattern === part) { - return true; - } - return false; - }); - if (match) { - return true; - } - } - return false; - } -} -exports.default = PartialMatcher; diff --git a/node_modules/fast-glob/out/providers/provider.d.ts b/node_modules/fast-glob/out/providers/provider.d.ts deleted file mode 100644 index 1053460..0000000 --- a/node_modules/fast-glob/out/providers/provider.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Task } from '../managers/tasks'; -import Settings from '../settings'; -import { MicromatchOptions, ReaderOptions } from '../types'; -import DeepFilter from './filters/deep'; -import EntryFilter from './filters/entry'; -import ErrorFilter from './filters/error'; -import EntryTransformer from './transformers/entry'; -export default abstract class Provider { - protected readonly _settings: Settings; - readonly errorFilter: ErrorFilter; - readonly entryFilter: EntryFilter; - readonly deepFilter: DeepFilter; - readonly entryTransformer: EntryTransformer; - constructor(_settings: Settings); - abstract read(_task: Task): T; - protected _getRootDirectory(task: Task): string; - protected _getReaderOptions(task: Task): ReaderOptions; - protected _getMicromatchOptions(): MicromatchOptions; -} diff --git a/node_modules/fast-glob/out/providers/provider.js b/node_modules/fast-glob/out/providers/provider.js deleted file mode 100644 index da88ee0..0000000 --- a/node_modules/fast-glob/out/providers/provider.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const deep_1 = require("./filters/deep"); -const entry_1 = require("./filters/entry"); -const error_1 = require("./filters/error"); -const entry_2 = require("./transformers/entry"); -class Provider { - constructor(_settings) { - this._settings = _settings; - this.errorFilter = new error_1.default(this._settings); - this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); - this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); - this.entryTransformer = new entry_2.default(this._settings); - } - _getRootDirectory(task) { - return path.resolve(this._settings.cwd, task.base); - } - _getReaderOptions(task) { - const basePath = task.base === '.' ? 
'' : task.base; - return { - basePath, - pathSegmentSeparator: '/', - concurrency: this._settings.concurrency, - deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), - entryFilter: this.entryFilter.getFilter(task.positive, task.negative), - errorFilter: this.errorFilter.getFilter(), - followSymbolicLinks: this._settings.followSymbolicLinks, - fs: this._settings.fs, - stats: this._settings.stats, - throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, - transform: this.entryTransformer.getTransformer() - }; - } - _getMicromatchOptions() { - return { - dot: this._settings.dot, - matchBase: this._settings.baseNameMatch, - nobrace: !this._settings.braceExpansion, - nocase: !this._settings.caseSensitiveMatch, - noext: !this._settings.extglob, - noglobstar: !this._settings.globstar, - posix: true, - strictSlashes: false - }; - } -} -exports.default = Provider; diff --git a/node_modules/fast-glob/out/providers/stream.d.ts b/node_modules/fast-glob/out/providers/stream.d.ts deleted file mode 100644 index 3d02a1f..0000000 --- a/node_modules/fast-glob/out/providers/stream.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -/// -import { Readable } from 'stream'; -import { Task } from '../managers/tasks'; -import ReaderStream from '../readers/stream'; -import { ReaderOptions } from '../types'; -import Provider from './provider'; -export default class ProviderStream extends Provider { - protected _reader: ReaderStream; - read(task: Task): Readable; - api(root: string, task: Task, options: ReaderOptions): Readable; -} diff --git a/node_modules/fast-glob/out/providers/stream.js b/node_modules/fast-glob/out/providers/stream.js deleted file mode 100644 index 85da62e..0000000 --- a/node_modules/fast-glob/out/providers/stream.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const stream_2 = require("../readers/stream"); -const provider_1 = require("./provider"); -class ProviderStream extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new stream_2.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const source = this.api(root, task, options); - const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); - source - .once('error', (error) => destination.emit('error', error)) - .on('data', (entry) => destination.emit('data', options.transform(entry))) - .once('end', () => destination.emit('end')); - destination - .once('close', () => source.destroy()); - return destination; - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderStream; diff --git a/node_modules/fast-glob/out/providers/sync.d.ts b/node_modules/fast-glob/out/providers/sync.d.ts deleted file mode 100644 index 9c0fe1e..0000000 --- a/node_modules/fast-glob/out/providers/sync.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Task } from '../managers/tasks'; -import ReaderSync from '../readers/sync'; -import { Entry, EntryItem, ReaderOptions } from '../types'; -import Provider from './provider'; -export default class ProviderSync extends Provider { - protected _reader: ReaderSync; - read(task: Task): EntryItem[]; - api(root: string, task: Task, options: ReaderOptions): Entry[]; -} diff --git a/node_modules/fast-glob/out/providers/sync.js 
b/node_modules/fast-glob/out/providers/sync.js deleted file mode 100644 index d70aa1b..0000000 --- a/node_modules/fast-glob/out/providers/sync.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const sync_1 = require("../readers/sync"); -const provider_1 = require("./provider"); -class ProviderSync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new sync_1.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = this.api(root, task, options); - return entries.map(options.transform); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderSync; diff --git a/node_modules/fast-glob/out/providers/transformers/entry.d.ts b/node_modules/fast-glob/out/providers/transformers/entry.d.ts deleted file mode 100644 index e9b85fa..0000000 --- a/node_modules/fast-glob/out/providers/transformers/entry.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import Settings from '../../settings'; -import { EntryTransformerFunction } from '../../types'; -export default class EntryTransformer { - private readonly _settings; - constructor(_settings: Settings); - getTransformer(): EntryTransformerFunction; - private _transform; -} diff --git a/node_modules/fast-glob/out/providers/transformers/entry.js b/node_modules/fast-glob/out/providers/transformers/entry.js deleted file mode 100644 index d11903c..0000000 --- a/node_modules/fast-glob/out/providers/transformers/entry.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class EntryTransformer { - constructor(_settings) { - this._settings = _settings; - } - getTransformer() { - return (entry) => this._transform(entry); - } - _transform(entry) { - let filepath = entry.path; - if (this._settings.absolute) { - filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); - filepath = utils.path.unixify(filepath); - } - if (this._settings.markDirectories && entry.dirent.isDirectory()) { - filepath += '/'; - } - if (!this._settings.objectMode) { - return filepath; - } - return Object.assign(Object.assign({}, entry), { path: filepath }); - } -} -exports.default = EntryTransformer; diff --git a/node_modules/fast-glob/out/readers/async.d.ts b/node_modules/fast-glob/out/readers/async.d.ts deleted file mode 100644 index fbca428..0000000 --- a/node_modules/fast-glob/out/readers/async.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import * as fsWalk from '@nodelib/fs.walk'; -import { Entry, ReaderOptions, Pattern } from '../types'; -import Reader from './reader'; -import ReaderStream from './stream'; -export default class ReaderAsync extends Reader> { - protected _walkAsync: typeof fsWalk.walk; - protected _readerStream: ReaderStream; - dynamic(root: string, options: ReaderOptions): Promise; - static(patterns: Pattern[], options: ReaderOptions): Promise; -} diff --git a/node_modules/fast-glob/out/readers/async.js b/node_modules/fast-glob/out/readers/async.js deleted file mode 100644 index d024145..0000000 --- a/node_modules/fast-glob/out/readers/async.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -const stream_1 = 
require("./stream"); -class ReaderAsync extends reader_1.default { - constructor() { - super(...arguments); - this._walkAsync = fsWalk.walk; - this._readerStream = new stream_1.default(this._settings); - } - dynamic(root, options) { - return new Promise((resolve, reject) => { - this._walkAsync(root, options, (error, entries) => { - if (error === null) { - resolve(entries); - } - else { - reject(error); - } - }); - }); - } - async static(patterns, options) { - const entries = []; - const stream = this._readerStream.static(patterns, options); - // After #235, replace it with an asynchronous iterator. - return new Promise((resolve, reject) => { - stream.once('error', reject); - stream.on('data', (entry) => entries.push(entry)); - stream.once('end', () => resolve(entries)); - }); - } -} -exports.default = ReaderAsync; diff --git a/node_modules/fast-glob/out/readers/reader.d.ts b/node_modules/fast-glob/out/readers/reader.d.ts deleted file mode 100644 index 2af16b6..0000000 --- a/node_modules/fast-glob/out/readers/reader.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -/// -import * as fs from 'fs'; -import * as fsStat from '@nodelib/fs.stat'; -import Settings from '../settings'; -import { Entry, ErrnoException, Pattern, ReaderOptions } from '../types'; -export default abstract class Reader { - protected readonly _settings: Settings; - protected readonly _fsStatSettings: fsStat.Settings; - constructor(_settings: Settings); - abstract dynamic(root: string, options: ReaderOptions): T; - abstract static(patterns: Pattern[], options: ReaderOptions): T; - protected _getFullEntryPath(filepath: string): string; - protected _makeEntry(stats: fs.Stats, pattern: Pattern): Entry; - protected _isFatalError(error: ErrnoException): boolean; -} diff --git a/node_modules/fast-glob/out/readers/reader.js b/node_modules/fast-glob/out/readers/reader.js deleted file mode 100644 index 7b40255..0000000 --- a/node_modules/fast-glob/out/readers/reader.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsStat = require("@nodelib/fs.stat"); -const utils = require("../utils"); -class Reader { - constructor(_settings) { - this._settings = _settings; - this._fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this._settings.followSymbolicLinks, - fs: this._settings.fs, - throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks - }); - } - _getFullEntryPath(filepath) { - return path.resolve(this._settings.cwd, filepath); - } - _makeEntry(stats, pattern) { - const entry = { - name: pattern, - path: pattern, - dirent: utils.fs.createDirentFromStats(pattern, stats) - }; - if (this._settings.stats) { - entry.stats = stats; - } - return entry; - } - _isFatalError(error) { - return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; - } -} -exports.default = Reader; diff --git a/node_modules/fast-glob/out/readers/stream.d.ts b/node_modules/fast-glob/out/readers/stream.d.ts deleted file mode 100644 index 1c74cac..0000000 --- a/node_modules/fast-glob/out/readers/stream.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import { Readable } from 'stream'; -import * as fsStat from '@nodelib/fs.stat'; -import * as fsWalk from '@nodelib/fs.walk'; -import { Pattern, ReaderOptions } from '../types'; -import Reader from './reader'; -export default class ReaderStream extends Reader { - protected _walkStream: typeof fsWalk.walkStream; - protected _stat: typeof fsStat.stat; - dynamic(root: string, options: ReaderOptions): 
Readable; - static(patterns: Pattern[], options: ReaderOptions): Readable; - private _getEntry; - private _getStat; -} diff --git a/node_modules/fast-glob/out/readers/stream.js b/node_modules/fast-glob/out/readers/stream.js deleted file mode 100644 index 317c6d5..0000000 --- a/node_modules/fast-glob/out/readers/stream.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const fsStat = require("@nodelib/fs.stat"); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -class ReaderStream extends reader_1.default { - constructor() { - super(...arguments); - this._walkStream = fsWalk.walkStream; - this._stat = fsStat.stat; - } - dynamic(root, options) { - return this._walkStream(root, options); - } - static(patterns, options) { - const filepaths = patterns.map(this._getFullEntryPath, this); - const stream = new stream_1.PassThrough({ objectMode: true }); - stream._write = (index, _enc, done) => { - return this._getEntry(filepaths[index], patterns[index], options) - .then((entry) => { - if (entry !== null && options.entryFilter(entry)) { - stream.push(entry); - } - if (index === filepaths.length - 1) { - stream.end(); - } - done(); - }) - .catch(done); - }; - for (let i = 0; i < filepaths.length; i++) { - stream.write(i); - } - return stream; - } - _getEntry(filepath, pattern, options) { - return this._getStat(filepath) - .then((stats) => this._makeEntry(stats, pattern)) - .catch((error) => { - if (options.errorFilter(error)) { - return null; - } - throw error; - }); - } - _getStat(filepath) { - return new Promise((resolve, reject) => { - this._stat(filepath, this._fsStatSettings, (error, stats) => { - return error === null ? resolve(stats) : reject(error); - }); - }); - } -} -exports.default = ReaderStream; diff --git a/node_modules/fast-glob/out/readers/sync.d.ts b/node_modules/fast-glob/out/readers/sync.d.ts deleted file mode 100644 index c96ffee..0000000 --- a/node_modules/fast-glob/out/readers/sync.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as fsWalk from '@nodelib/fs.walk'; -import { Entry, Pattern, ReaderOptions } from '../types'; -import Reader from './reader'; -export default class ReaderSync extends Reader { - protected _walkSync: typeof fsWalk.walkSync; - protected _statSync: typeof fsStat.statSync; - dynamic(root: string, options: ReaderOptions): Entry[]; - static(patterns: Pattern[], options: ReaderOptions): Entry[]; - private _getEntry; - private _getStat; -} diff --git a/node_modules/fast-glob/out/readers/sync.js b/node_modules/fast-glob/out/readers/sync.js deleted file mode 100644 index 4704d65..0000000 --- a/node_modules/fast-glob/out/readers/sync.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsStat = require("@nodelib/fs.stat"); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -class ReaderSync extends reader_1.default { - constructor() { - super(...arguments); - this._walkSync = fsWalk.walkSync; - this._statSync = fsStat.statSync; - } - dynamic(root, options) { - return this._walkSync(root, options); - } - static(patterns, options) { - const entries = []; - for (const pattern of patterns) { - const filepath = this._getFullEntryPath(pattern); - const entry = this._getEntry(filepath, pattern, options); - if (entry === null || !options.entryFilter(entry)) { - continue; - } - entries.push(entry); - } 
- return entries; - } - _getEntry(filepath, pattern, options) { - try { - const stats = this._getStat(filepath); - return this._makeEntry(stats, pattern); - } - catch (error) { - if (options.errorFilter(error)) { - return null; - } - throw error; - } - } - _getStat(filepath) { - return this._statSync(filepath, this._fsStatSettings); - } -} -exports.default = ReaderSync; diff --git a/node_modules/fast-glob/out/settings.d.ts b/node_modules/fast-glob/out/settings.d.ts deleted file mode 100644 index 76a74f8..0000000 --- a/node_modules/fast-glob/out/settings.d.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { FileSystemAdapter, Pattern } from './types'; -export declare const DEFAULT_FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export type Options = { - /** - * Return the absolute path for entries. - * - * @default false - */ - absolute?: boolean; - /** - * If set to `true`, then patterns without slashes will be matched against - * the basename of the path if it contains slashes. - * - * @default false - */ - baseNameMatch?: boolean; - /** - * Enables Bash-like brace expansion. - * - * @default true - */ - braceExpansion?: boolean; - /** - * Enables a case-sensitive mode for matching files. - * - * @default true - */ - caseSensitiveMatch?: boolean; - /** - * Specifies the maximum number of concurrent requests from a reader to read - * directories. - * - * @default os.cpus().length - */ - concurrency?: number; - /** - * The current working directory in which to search. - * - * @default process.cwd() - */ - cwd?: string; - /** - * Specifies the maximum depth of a read directory relative to the start - * directory. - * - * @default Infinity - */ - deep?: number; - /** - * Allow patterns to match entries that begin with a period (`.`). - * - * @default false - */ - dot?: boolean; - /** - * Enables Bash-like `extglob` functionality. - * - * @default true - */ - extglob?: boolean; - /** - * Indicates whether to traverse descendants of symbolic link directories. - * - * @default true - */ - followSymbolicLinks?: boolean; - /** - * Custom implementation of methods for working with the file system. - * - * @default fs.* - */ - fs?: Partial; - /** - * Enables recursively repeats a pattern containing `**`. - * If `false`, `**` behaves exactly like `*`. - * - * @default true - */ - globstar?: boolean; - /** - * An array of glob patterns to exclude matches. - * This is an alternative way to use negative patterns. - * - * @default [] - */ - ignore?: Pattern[]; - /** - * Mark the directory path with the final slash. - * - * @default false - */ - markDirectories?: boolean; - /** - * Returns objects (instead of strings) describing entries. - * - * @default false - */ - objectMode?: boolean; - /** - * Return only directories. - * - * @default false - */ - onlyDirectories?: boolean; - /** - * Return only files. - * - * @default true - */ - onlyFiles?: boolean; - /** - * Enables an object mode (`objectMode`) with an additional `stats` field. - * - * @default false - */ - stats?: boolean; - /** - * By default this package suppress only `ENOENT` errors. - * Set to `true` to suppress any error. - * - * @default false - */ - suppressErrors?: boolean; - /** - * Throw an error when symbolic link is broken if `true` or safely - * return `lstat` call if `false`. - * - * @default false - */ - throwErrorOnBrokenSymbolicLink?: boolean; - /** - * Ensures that the returned entries are unique. 
- * - * @default true - */ - unique?: boolean; -}; -export default class Settings { - private readonly _options; - readonly absolute: boolean; - readonly baseNameMatch: boolean; - readonly braceExpansion: boolean; - readonly caseSensitiveMatch: boolean; - readonly concurrency: number; - readonly cwd: string; - readonly deep: number; - readonly dot: boolean; - readonly extglob: boolean; - readonly followSymbolicLinks: boolean; - readonly fs: FileSystemAdapter; - readonly globstar: boolean; - readonly ignore: Pattern[]; - readonly markDirectories: boolean; - readonly objectMode: boolean; - readonly onlyDirectories: boolean; - readonly onlyFiles: boolean; - readonly stats: boolean; - readonly suppressErrors: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - readonly unique: boolean; - constructor(_options?: Options); - private _getValue; - private _getFileSystemMethods; -} diff --git a/node_modules/fast-glob/out/settings.js b/node_modules/fast-glob/out/settings.js deleted file mode 100644 index 23f916c..0000000 --- a/node_modules/fast-glob/out/settings.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -const os = require("os"); -/** - * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. - * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 - */ -const CPU_COUNT = Math.max(os.cpus().length, 1); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - lstatSync: fs.lstatSync, - stat: fs.stat, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -class Settings { - constructor(_options = {}) { - this._options = _options; - this.absolute = this._getValue(this._options.absolute, false); - this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); - this.braceExpansion = this._getValue(this._options.braceExpansion, true); - this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); - this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); - this.cwd = this._getValue(this._options.cwd, process.cwd()); - this.deep = this._getValue(this._options.deep, Infinity); - this.dot = this._getValue(this._options.dot, false); - this.extglob = this._getValue(this._options.extglob, true); - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); - this.fs = this._getFileSystemMethods(this._options.fs); - this.globstar = this._getValue(this._options.globstar, true); - this.ignore = this._getValue(this._options.ignore, []); - this.markDirectories = this._getValue(this._options.markDirectories, false); - this.objectMode = this._getValue(this._options.objectMode, false); - this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); - this.onlyFiles = this._getValue(this._options.onlyFiles, true); - this.stats = this._getValue(this._options.stats, false); - this.suppressErrors = this._getValue(this._options.suppressErrors, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); - this.unique = this._getValue(this._options.unique, true); - if (this.onlyDirectories) { - this.onlyFiles = false; - } - if (this.stats) { - this.objectMode = true; - } - // Remove the cast to the array in the next major (#404). 
- this.ignore = [].concat(this.ignore); - } - _getValue(option, value) { - return option === undefined ? value : option; - } - _getFileSystemMethods(methods = {}) { - return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); - } -} -exports.default = Settings; diff --git a/node_modules/fast-glob/out/types/index.d.ts b/node_modules/fast-glob/out/types/index.d.ts deleted file mode 100644 index 6506caf..0000000 --- a/node_modules/fast-glob/out/types/index.d.ts +++ /dev/null @@ -1,31 +0,0 @@ -/// -import * as fsWalk from '@nodelib/fs.walk'; -export type ErrnoException = NodeJS.ErrnoException; -export type Entry = fsWalk.Entry; -export type EntryItem = string | Entry; -export type Pattern = string; -export type PatternRe = RegExp; -export type PatternsGroup = Record; -export type ReaderOptions = fsWalk.Options & { - transform(entry: Entry): EntryItem; - deepFilter: DeepFilterFunction; - entryFilter: EntryFilterFunction; - errorFilter: ErrorFilterFunction; - fs: FileSystemAdapter; - stats: boolean; -}; -export type ErrorFilterFunction = fsWalk.ErrorFilterFunction; -export type EntryFilterFunction = fsWalk.EntryFilterFunction; -export type DeepFilterFunction = fsWalk.DeepFilterFunction; -export type EntryTransformerFunction = (entry: Entry) => EntryItem; -export type MicromatchOptions = { - dot?: boolean; - matchBase?: boolean; - nobrace?: boolean; - nocase?: boolean; - noext?: boolean; - noglobstar?: boolean; - posix?: boolean; - strictSlashes?: boolean; -}; -export type FileSystemAdapter = fsWalk.FileSystemAdapter; diff --git a/node_modules/fast-glob/out/types/index.js b/node_modules/fast-glob/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/fast-glob/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/fast-glob/out/utils/array.d.ts b/node_modules/fast-glob/out/utils/array.d.ts deleted file mode 100644 index 98e7325..0000000 --- a/node_modules/fast-glob/out/utils/array.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function flatten(items: T[][]): T[]; -export declare function splitWhen(items: T[], predicate: (item: T) => boolean): T[][]; diff --git a/node_modules/fast-glob/out/utils/array.js b/node_modules/fast-glob/out/utils/array.js deleted file mode 100644 index 50c406e..0000000 --- a/node_modules/fast-glob/out/utils/array.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.splitWhen = exports.flatten = void 0; -function flatten(items) { - return items.reduce((collection, item) => [].concat(collection, item), []); -} -exports.flatten = flatten; -function splitWhen(items, predicate) { - const result = [[]]; - let groupIndex = 0; - for (const item of items) { - if (predicate(item)) { - groupIndex++; - result[groupIndex] = []; - } - else { - result[groupIndex].push(item); - } - } - return result; -} -exports.splitWhen = splitWhen; diff --git a/node_modules/fast-glob/out/utils/errno.d.ts b/node_modules/fast-glob/out/utils/errno.d.ts deleted file mode 100644 index 1c08d3b..0000000 --- a/node_modules/fast-glob/out/utils/errno.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import { ErrnoException } from '../types'; -export declare function isEnoentCodeError(error: ErrnoException): boolean; diff --git a/node_modules/fast-glob/out/utils/errno.js b/node_modules/fast-glob/out/utils/errno.js deleted file mode 100644 index f0bd801..0000000 --- 
a/node_modules/fast-glob/out/utils/errno.js +++ /dev/null @@ -1,7 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isEnoentCodeError = void 0; -function isEnoentCodeError(error) { - return error.code === 'ENOENT'; -} -exports.isEnoentCodeError = isEnoentCodeError; diff --git a/node_modules/fast-glob/out/utils/fs.d.ts b/node_modules/fast-glob/out/utils/fs.d.ts deleted file mode 100644 index 64c61ce..0000000 --- a/node_modules/fast-glob/out/utils/fs.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -import * as fs from 'fs'; -import { Dirent } from '@nodelib/fs.walk'; -export declare function createDirentFromStats(name: string, stats: fs.Stats): Dirent; diff --git a/node_modules/fast-glob/out/utils/fs.js b/node_modules/fast-glob/out/utils/fs.js deleted file mode 100644 index ace7c74..0000000 --- a/node_modules/fast-glob/out/utils/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/fast-glob/out/utils/index.d.ts b/node_modules/fast-glob/out/utils/index.d.ts deleted file mode 100644 index f634cad..0000000 --- a/node_modules/fast-glob/out/utils/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import * as array from './array'; -import * as errno from './errno'; -import * as fs from './fs'; -import * as path from './path'; -import * as pattern from './pattern'; -import * as stream from './stream'; -import * as string from './string'; -export { array, errno, fs, path, pattern, stream, string }; diff --git a/node_modules/fast-glob/out/utils/index.js b/node_modules/fast-glob/out/utils/index.js deleted file mode 100644 index 0f92c16..0000000 --- a/node_modules/fast-glob/out/utils/index.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; -const array = require("./array"); -exports.array = array; -const errno = require("./errno"); -exports.errno = errno; -const fs = require("./fs"); -exports.fs = fs; -const path = require("./path"); -exports.path = path; -const pattern = require("./pattern"); -exports.pattern = pattern; -const stream = require("./stream"); -exports.stream = stream; -const string = require("./string"); -exports.string = string; diff --git a/node_modules/fast-glob/out/utils/path.d.ts b/node_modules/fast-glob/out/utils/path.d.ts deleted file mode 100644 index 0b13f4b..0000000 --- a/node_modules/fast-glob/out/utils/path.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Pattern } from '../types'; -/** - * Designed to work only with simple paths: `dir\\file`. 
- */ -export declare function unixify(filepath: string): string; -export declare function makeAbsolute(cwd: string, filepath: string): string; -export declare function removeLeadingDotSegment(entry: string): string; -export declare const escape: typeof escapeWindowsPath; -export declare function escapeWindowsPath(pattern: Pattern): Pattern; -export declare function escapePosixPath(pattern: Pattern): Pattern; -export declare const convertPathToPattern: typeof convertWindowsPathToPattern; -export declare function convertWindowsPathToPattern(filepath: string): Pattern; -export declare function convertPosixPathToPattern(filepath: string): Pattern; diff --git a/node_modules/fast-glob/out/utils/path.js b/node_modules/fast-glob/out/utils/path.js deleted file mode 100644 index 7b53b39..0000000 --- a/node_modules/fast-glob/out/utils/path.js +++ /dev/null @@ -1,68 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.convertPosixPathToPattern = exports.convertWindowsPathToPattern = exports.convertPathToPattern = exports.escapePosixPath = exports.escapeWindowsPath = exports.escape = exports.removeLeadingDotSegment = exports.makeAbsolute = exports.unixify = void 0; -const os = require("os"); -const path = require("path"); -const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; -const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ -/** - * All non-escaped special characters. - * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. - * Windows: (){}[], !+@ before (, ! at the beginning. - */ -const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; -const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; -/** - * The device path (\\.\ or \\?\). - * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths - */ -const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; -/** - * All backslashes except those escaping special characters. - * Windows: !()+@{} - * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions - */ -const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; -/** - * Designed to work only with simple paths: `dir\\file`. - */ -function unixify(filepath) { - return filepath.replace(/\\/g, '/'); -} -exports.unixify = unixify; -function makeAbsolute(cwd, filepath) { - return path.resolve(cwd, filepath); -} -exports.makeAbsolute = makeAbsolute; -function removeLeadingDotSegment(entry) { - // We do not use `startsWith` because this is 10x slower than current implementation for some cases. - // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with - if (entry.charAt(0) === '.') { - const secondCharactery = entry.charAt(1); - if (secondCharactery === '/' || secondCharactery === '\\') { - return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); - } - } - return entry; -} -exports.removeLeadingDotSegment = removeLeadingDotSegment; -exports.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; -function escapeWindowsPath(pattern) { - return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escapeWindowsPath = escapeWindowsPath; -function escapePosixPath(pattern) { - return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escapePosixPath = escapePosixPath; -exports.convertPathToPattern = IS_WINDOWS_PLATFORM ? 
convertWindowsPathToPattern : convertPosixPathToPattern; -function convertWindowsPathToPattern(filepath) { - return escapeWindowsPath(filepath) - .replace(DOS_DEVICE_PATH_RE, '//$1') - .replace(WINDOWS_BACKSLASHES_RE, '/'); -} -exports.convertWindowsPathToPattern = convertWindowsPathToPattern; -function convertPosixPathToPattern(filepath) { - return escapePosixPath(filepath); -} -exports.convertPosixPathToPattern = convertPosixPathToPattern; diff --git a/node_modules/fast-glob/out/utils/pattern.d.ts b/node_modules/fast-glob/out/utils/pattern.d.ts deleted file mode 100644 index e7ff07b..0000000 --- a/node_modules/fast-glob/out/utils/pattern.d.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { MicromatchOptions, Pattern, PatternRe } from '../types'; -type PatternTypeOptions = { - braceExpansion?: boolean; - caseSensitiveMatch?: boolean; - extglob?: boolean; -}; -export declare function isStaticPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; -export declare function isDynamicPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; -export declare function convertToPositivePattern(pattern: Pattern): Pattern; -export declare function convertToNegativePattern(pattern: Pattern): Pattern; -export declare function isNegativePattern(pattern: Pattern): boolean; -export declare function isPositivePattern(pattern: Pattern): boolean; -export declare function getNegativePatterns(patterns: Pattern[]): Pattern[]; -export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; -/** - * Returns patterns that can be applied inside the current directory. - * - * @example - * // ['./*', '*', 'a/*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -export declare function getPatternsInsideCurrentDirectory(patterns: Pattern[]): Pattern[]; -/** - * Returns patterns to be expanded relative to (outside) the current directory. - * - * @example - * // ['../*', './../*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -export declare function getPatternsOutsideCurrentDirectory(patterns: Pattern[]): Pattern[]; -export declare function isPatternRelatedToParentDirectory(pattern: Pattern): boolean; -export declare function getBaseDirectory(pattern: Pattern): string; -export declare function hasGlobStar(pattern: Pattern): boolean; -export declare function endsWithSlashGlobStar(pattern: Pattern): boolean; -export declare function isAffectDepthOfReadingPattern(pattern: Pattern): boolean; -export declare function expandPatternsWithBraceExpansion(patterns: Pattern[]): Pattern[]; -export declare function expandBraceExpansion(pattern: Pattern): Pattern[]; -export declare function getPatternParts(pattern: Pattern, options: MicromatchOptions): Pattern[]; -export declare function makeRe(pattern: Pattern, options: MicromatchOptions): PatternRe; -export declare function convertPatternsToRe(patterns: Pattern[], options: MicromatchOptions): PatternRe[]; -export declare function matchAny(entry: string, patternsRe: PatternRe[]): boolean; -/** - * This package only works with forward slashes as a path separator. - * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
- */ -export declare function removeDuplicateSlashes(pattern: string): string; -export {}; diff --git a/node_modules/fast-glob/out/utils/pattern.js b/node_modules/fast-glob/out/utils/pattern.js deleted file mode 100644 index d7d4e91..0000000 --- a/node_modules/fast-glob/out/utils/pattern.js +++ /dev/null @@ -1,188 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.removeDuplicateSlashes = exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; -const path = require("path"); -const globParent = require("glob-parent"); -const micromatch = require("micromatch"); -const GLOBSTAR = '**'; -const ESCAPE_SYMBOL = '\\'; -const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; -const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; -const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; -const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; -const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; -/** - * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. - * The latter is due to the presence of the device path at the beginning of the UNC path. - */ -const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; -function isStaticPattern(pattern, options = {}) { - return !isDynamicPattern(pattern, options); -} -exports.isStaticPattern = isStaticPattern; -function isDynamicPattern(pattern, options = {}) { - /** - * A special case with an empty string is necessary for matching patterns that start with a forward slash. - * An empty string cannot be a dynamic pattern. - * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. - */ - if (pattern === '') { - return false; - } - /** - * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check - * filepath directly (without read directory). - */ - if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { - return true; - } - if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { - return true; - } - return false; -} -exports.isDynamicPattern = isDynamicPattern; -function hasBraceExpansion(pattern) { - const openingBraceIndex = pattern.indexOf('{'); - if (openingBraceIndex === -1) { - return false; - } - const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); - if (closingBraceIndex === -1) { - return false; - } - const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); - return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); -} -function convertToPositivePattern(pattern) { - return isNegativePattern(pattern) ? 
pattern.slice(1) : pattern; -} -exports.convertToPositivePattern = convertToPositivePattern; -function convertToNegativePattern(pattern) { - return '!' + pattern; -} -exports.convertToNegativePattern = convertToNegativePattern; -function isNegativePattern(pattern) { - return pattern.startsWith('!') && pattern[1] !== '('; -} -exports.isNegativePattern = isNegativePattern; -function isPositivePattern(pattern) { - return !isNegativePattern(pattern); -} -exports.isPositivePattern = isPositivePattern; -function getNegativePatterns(patterns) { - return patterns.filter(isNegativePattern); -} -exports.getNegativePatterns = getNegativePatterns; -function getPositivePatterns(patterns) { - return patterns.filter(isPositivePattern); -} -exports.getPositivePatterns = getPositivePatterns; -/** - * Returns patterns that can be applied inside the current directory. - * - * @example - * // ['./*', '*', 'a/*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -function getPatternsInsideCurrentDirectory(patterns) { - return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); -} -exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; -/** - * Returns patterns to be expanded relative to (outside) the current directory. - * - * @example - * // ['../*', './../*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -function getPatternsOutsideCurrentDirectory(patterns) { - return patterns.filter(isPatternRelatedToParentDirectory); -} -exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; -function isPatternRelatedToParentDirectory(pattern) { - return pattern.startsWith('..') || pattern.startsWith('./..'); -} -exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; -function getBaseDirectory(pattern) { - return globParent(pattern, { flipBackslashes: false }); -} -exports.getBaseDirectory = getBaseDirectory; -function hasGlobStar(pattern) { - return pattern.includes(GLOBSTAR); -} -exports.hasGlobStar = hasGlobStar; -function endsWithSlashGlobStar(pattern) { - return pattern.endsWith('/' + GLOBSTAR); -} -exports.endsWithSlashGlobStar = endsWithSlashGlobStar; -function isAffectDepthOfReadingPattern(pattern) { - const basename = path.basename(pattern); - return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); -} -exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; -function expandPatternsWithBraceExpansion(patterns) { - return patterns.reduce((collection, pattern) => { - return collection.concat(expandBraceExpansion(pattern)); - }, []); -} -exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; -function expandBraceExpansion(pattern) { - const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); - /** - * Sort the patterns by length so that the same depth patterns are processed side by side. - * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` - */ - patterns.sort((a, b) => a.length - b.length); - /** - * Micromatch can return an empty string in the case of patterns like `{a,}`. - */ - return patterns.filter((pattern) => pattern !== ''); -} -exports.expandBraceExpansion = expandBraceExpansion; -function getPatternParts(pattern, options) { - let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); - /** - * The scan method returns an empty array in some cases. - * See micromatch/picomatch#58 for more details. 
- */ - if (parts.length === 0) { - parts = [pattern]; - } - /** - * The scan method does not return an empty part for the pattern with a forward slash. - * This is another part of micromatch/picomatch#58. - */ - if (parts[0].startsWith('/')) { - parts[0] = parts[0].slice(1); - parts.unshift(''); - } - return parts; -} -exports.getPatternParts = getPatternParts; -function makeRe(pattern, options) { - return micromatch.makeRe(pattern, options); -} -exports.makeRe = makeRe; -function convertPatternsToRe(patterns, options) { - return patterns.map((pattern) => makeRe(pattern, options)); -} -exports.convertPatternsToRe = convertPatternsToRe; -function matchAny(entry, patternsRe) { - return patternsRe.some((patternRe) => patternRe.test(entry)); -} -exports.matchAny = matchAny; -/** - * This package only works with forward slashes as a path separator. - * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. - */ -function removeDuplicateSlashes(pattern) { - return pattern.replace(DOUBLE_SLASH_RE, '/'); -} -exports.removeDuplicateSlashes = removeDuplicateSlashes; diff --git a/node_modules/fast-glob/out/utils/stream.d.ts b/node_modules/fast-glob/out/utils/stream.d.ts deleted file mode 100644 index 4daf913..0000000 --- a/node_modules/fast-glob/out/utils/stream.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -/// -import { Readable } from 'stream'; -export declare function merge(streams: Readable[]): NodeJS.ReadableStream; diff --git a/node_modules/fast-glob/out/utils/stream.js b/node_modules/fast-glob/out/utils/stream.js deleted file mode 100644 index b32028c..0000000 --- a/node_modules/fast-glob/out/utils/stream.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.merge = void 0; -const merge2 = require("merge2"); -function merge(streams) { - const mergedStream = merge2(streams); - streams.forEach((stream) => { - stream.once('error', (error) => mergedStream.emit('error', error)); - }); - mergedStream.once('close', () => propagateCloseEventToSources(streams)); - mergedStream.once('end', () => propagateCloseEventToSources(streams)); - return mergedStream; -} -exports.merge = merge; -function propagateCloseEventToSources(streams) { - streams.forEach((stream) => stream.emit('close')); -} diff --git a/node_modules/fast-glob/out/utils/string.d.ts b/node_modules/fast-glob/out/utils/string.d.ts deleted file mode 100644 index c884735..0000000 --- a/node_modules/fast-glob/out/utils/string.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function isString(input: unknown): input is string; -export declare function isEmpty(input: string): boolean; diff --git a/node_modules/fast-glob/out/utils/string.js b/node_modules/fast-glob/out/utils/string.js deleted file mode 100644 index 76e7ea5..0000000 --- a/node_modules/fast-glob/out/utils/string.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isEmpty = exports.isString = void 0; -function isString(input) { - return typeof input === 'string'; -} -exports.isString = isString; -function isEmpty(input) { - return input === ''; -} -exports.isEmpty = isEmpty; diff --git a/node_modules/fast-glob/package.json b/node_modules/fast-glob/package.json deleted file mode 100644 index 770cc6e..0000000 --- a/node_modules/fast-glob/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "fast-glob", - "version": "3.3.2", - "description": "It's a very fast and efficient glob 
library for Node.js", - "license": "MIT", - "repository": "mrmlnc/fast-glob", - "author": { - "name": "Denis Malinochkin", - "url": "https://mrmlnc.com" - }, - "engines": { - "node": ">=8.6.0" - }, - "main": "out/index.js", - "typings": "out/index.d.ts", - "files": [ - "out", - "!out/{benchmark,tests}", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "keywords": [ - "glob", - "patterns", - "fast", - "implementation" - ], - "devDependencies": { - "@nodelib/fs.macchiato": "^1.0.1", - "@types/glob-parent": "^5.1.0", - "@types/merge2": "^1.1.4", - "@types/micromatch": "^4.0.0", - "@types/mocha": "^5.2.7", - "@types/node": "^14.18.53", - "@types/picomatch": "^2.3.0", - "@types/sinon": "^7.5.0", - "bencho": "^0.1.1", - "eslint": "^6.5.1", - "eslint-config-mrmlnc": "^1.1.0", - "execa": "^7.1.1", - "fast-glob": "^3.0.4", - "fdir": "^6.0.1", - "glob": "^10.0.0", - "hereby": "^1.8.1", - "mocha": "^6.2.1", - "rimraf": "^5.0.0", - "sinon": "^7.5.0", - "snap-shot-it": "^7.9.10", - "typescript": "^4.9.5" - }, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "scripts": { - "clean": "rimraf out", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "test:e2e": "mocha \"out/**/*.e2e.js\" -s 0", - "test:e2e:sync": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(sync\\)\"", - "test:e2e:async": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(async\\)\"", - "test:e2e:stream": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(stream\\)\"", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile -- --sourceMap --watch", - "bench:async": "npm run bench:product:async && npm run bench:regression:async", - "bench:stream": "npm run bench:product:stream && npm run bench:regression:stream", - "bench:sync": "npm run bench:product:sync && npm run bench:regression:sync", - "bench:product": "npm run bench:product:async && npm run bench:product:sync && npm run bench:product:stream", - "bench:product:async": "hereby bench:product:async", - "bench:product:sync": "hereby bench:product:sync", - "bench:product:stream": "hereby bench:product:stream", - "bench:regression": "npm run bench:regression:async && npm run bench:regression:sync && npm run bench:regression:stream", - "bench:regression:async": "hereby bench:regression:async", - "bench:regression:sync": "hereby bench:regression:sync", - "bench:regression:stream": "hereby bench:regression:stream" - } -} diff --git a/node_modules/fastq/.github/dependabot.yml b/node_modules/fastq/.github/dependabot.yml deleted file mode 100644 index 7e7cbe1..0000000 --- a/node_modules/fastq/.github/dependabot.yml +++ /dev/null @@ -1,11 +0,0 @@ -version: 2 -updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - open-pull-requests-limit: 10 - ignore: - - dependency-name: standard - versions: - - 16.0.3 diff --git a/node_modules/fastq/.github/workflows/ci.yml b/node_modules/fastq/.github/workflows/ci.yml deleted file mode 100644 index 69521c4..0000000 --- a/node_modules/fastq/.github/workflows/ci.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: ci - -on: [push, pull_request] - -jobs: - legacy: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: ['0.10', '0.12', 4.x, 6.x, 8.x] - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v1 - with: - node-version: ${{ 
matrix.node-version }} - - - name: Install - run: | - npm install --production && npm install tape - - - name: Run tests - run: | - npm run legacy - - test: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: [10.x, 12.x, 13.x, 14.x, 15.x, 16.x, 18.x, 20.x] - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install - - - name: Run tests - run: | - npm run test - - types: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: 16 - - - name: Install - run: | - npm install - - - name: Run types tests - run: | - npm run typescript diff --git a/node_modules/fastq/LICENSE b/node_modules/fastq/LICENSE deleted file mode 100644 index 27c7bb4..0000000 --- a/node_modules/fastq/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2015-2020, Matteo Collina - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fastq/README.md b/node_modules/fastq/README.md deleted file mode 100644 index af5feee..0000000 --- a/node_modules/fastq/README.md +++ /dev/null @@ -1,306 +0,0 @@ -# fastq - -![ci][ci-url] -[![npm version][npm-badge]][npm-url] - -Fast, in memory work queue. - -Benchmarks (1 million tasks): - -* setImmediate: 812ms -* fastq: 854ms -* async.queue: 1298ms -* neoAsync.queue: 1249ms - -Obtained on node 12.16.1, on a dedicated server. - -If you need zero-overhead series function call, check out -[fastseries](http://npm.im/fastseries). For zero-overhead parallel -function call, check out [fastparallel](http://npm.im/fastparallel). 
- -[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) - - * Installation - * Usage - * API - * Licence & copyright - -## Install - -`npm i fastq --save` - -## Usage (callback API) - -```js -'use strict' - -const queue = require('fastq')(worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log('the result is', result) -}) - -function worker (arg, cb) { - cb(null, arg * 2) -} -``` - -## Usage (promise API) - -```js -const queue = require('fastq').promise(worker, 1) - -async function worker (arg) { - return arg * 2 -} - -async function run () { - const result = await queue.push(42) - console.log('the result is', result) -} - -run() -``` - -### Setting "this" - -```js -'use strict' - -const that = { hello: 'world' } -const queue = require('fastq')(that, worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log(this) - console.log('the result is', result) -}) - -function worker (arg, cb) { - console.log(this) - cb(null, arg * 2) -} -``` - -### Using with TypeScript (callback API) - -```ts -'use strict' - -import * as fastq from "fastq"; -import type { queue, done } from "fastq"; - -type Task = { - id: number -} - -const q: queue = fastq(worker, 1) - -q.push({ id: 42}) - -function worker (arg: Task, cb: done) { - console.log(arg.id) - cb(null) -} -``` - -### Using with TypeScript (promise API) - -```ts -'use strict' - -import * as fastq from "fastq"; -import type { queueAsPromised } from "fastq"; - -type Task = { - id: number -} - -const q: queueAsPromised = fastq.promise(asyncWorker, 1) - -q.push({ id: 42}).catch((err) => console.error(err)) - -async function asyncWorker (arg: Task): Promise { - // No need for a try-catch block, fastq handles errors automatically - console.log(arg.id) -} -``` - -## API - -* fastqueue() -* queue#push() -* queue#unshift() -* queue#pause() -* queue#resume() -* queue#idle() -* queue#length() -* queue#getQueue() -* queue#kill() -* queue#killAndDrain() -* queue#error() -* queue#concurrency -* queue#drain -* queue#empty -* queue#saturated -* fastqueue.promise() - -------------------------------------------------------- - -### fastqueue([that], worker, concurrency) - -Creates a new queue. - -Arguments: - -* `that`, optional context of the `worker` function. -* `worker`, worker function, it would be called with `that` as `this`, - if that is specified. -* `concurrency`, number of concurrent tasks that could be executed in - parallel. - -------------------------------------------------------- - -### queue.push(task, done) - -Add a task at the end of the queue. `done(err, result)` will be called -when the task was processed. - -------------------------------------------------------- - -### queue.unshift(task, done) - -Add a task at the beginning of the queue. `done(err, result)` will be called -when the task was processed. - -------------------------------------------------------- - -### queue.pause() - -Pause the processing of tasks. Currently worked tasks are not -stopped. - -------------------------------------------------------- - -### queue.resume() - -Resume the processing of tasks. - -------------------------------------------------------- - -### queue.idle() - -Returns `false` if there are tasks being processed or waiting to be processed. -`true` otherwise. - -------------------------------------------------------- - -### queue.length() - -Returns the number of tasks waiting to be processed (in the queue). 
- -------------------------------------------------------- - -### queue.getQueue() - -Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks - -------------------------------------------------------- - -### queue.kill() - -Removes all tasks waiting to be processed, and reset `drain` to an empty -function. - -------------------------------------------------------- - -### queue.killAndDrain() - -Same than `kill` but the `drain` function will be called before reset to empty. - -------------------------------------------------------- - -### queue.error(handler) - -Set a global error handler. `handler(err, task)` will be called -each time a task is completed, `err` will be not null if the task has thrown an error. - -------------------------------------------------------- - -### queue.concurrency - -Property that returns the number of concurrent tasks that could be executed in -parallel. It can be altered at runtime. - -------------------------------------------------------- - -### queue.drain - -Function that will be called when the last -item from the queue has been processed by a worker. -It can be altered at runtime. - -------------------------------------------------------- - -### queue.empty - -Function that will be called when the last -item from the queue has been assigned to a worker. -It can be altered at runtime. - -------------------------------------------------------- - -### queue.saturated - -Function that will be called when the queue hits the concurrency -limit. -It can be altered at runtime. - -------------------------------------------------------- - -### fastqueue.promise([that], worker(arg), concurrency) - -Creates a new queue with `Promise` apis. It also offers all the methods -and properties of the object returned by [`fastqueue`](#fastqueue) with the modified -[`push`](#pushPromise) and [`unshift`](#unshiftPromise) methods. - -Node v10+ is required to use the promisified version. - -Arguments: -* `that`, optional context of the `worker` function. -* `worker`, worker function, it would be called with `that` as `this`, - if that is specified. It MUST return a `Promise`. -* `concurrency`, number of concurrent tasks that could be executed in - parallel. - - -#### queue.push(task) => Promise - -Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) -when the task is completed successfully (unsuccessfully). - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. - - -#### queue.unshift(task) => Promise - -Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) -when the task is completed successfully (unsuccessfully). - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. - - -#### queue.drained() => Promise - -Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. 
- -## License - -ISC - -[ci-url]: https://github.com/mcollina/fastq/workflows/ci/badge.svg -[npm-badge]: https://badge.fury.io/js/fastq.svg -[npm-url]: https://badge.fury.io/js/fastq diff --git a/node_modules/fastq/bench.js b/node_modules/fastq/bench.js deleted file mode 100644 index 4eaa829..0000000 --- a/node_modules/fastq/bench.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -const max = 1000000 -const fastqueue = require('./')(worker, 1) -const { promisify } = require('util') -const immediate = promisify(setImmediate) -const qPromise = require('./').promise(immediate, 1) -const async = require('async') -const neo = require('neo-async') -const asyncqueue = async.queue(worker, 1) -const neoqueue = neo.queue(worker, 1) - -function bench (func, done) { - const key = max + '*' + func.name - let count = -1 - - console.time(key) - end() - - function end () { - if (++count < max) { - func(end) - } else { - console.timeEnd(key) - if (done) { - done() - } - } - } -} - -function benchFastQ (done) { - fastqueue.push(42, done) -} - -function benchAsyncQueue (done) { - asyncqueue.push(42, done) -} - -function benchNeoQueue (done) { - neoqueue.push(42, done) -} - -function worker (arg, cb) { - setImmediate(cb) -} - -function benchSetImmediate (cb) { - worker(42, cb) -} - -function benchFastQPromise (done) { - qPromise.push(42).then(function () { done() }, done) -} - -function runBench (done) { - async.eachSeries([ - benchSetImmediate, - benchFastQ, - benchNeoQueue, - benchAsyncQueue, - benchFastQPromise - ], bench, done) -} - -runBench(runBench) diff --git a/node_modules/fastq/example.js b/node_modules/fastq/example.js deleted file mode 100644 index 665fdc8..0000000 --- a/node_modules/fastq/example.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var queue = require('./')(worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log('the result is', result) -}) - -function worker (arg, cb) { - cb(null, 42 * 2) -} diff --git a/node_modules/fastq/example.mjs b/node_modules/fastq/example.mjs deleted file mode 100644 index 81be789..0000000 --- a/node_modules/fastq/example.mjs +++ /dev/null @@ -1,11 +0,0 @@ -import { promise as queueAsPromised } from './queue.js' - -/* eslint-disable */ - -const queue = queueAsPromised(worker, 1) - -console.log('the result is', await queue.push(42)) - -async function worker (arg) { - return 42 * 2 -} diff --git a/node_modules/fastq/index.d.ts b/node_modules/fastq/index.d.ts deleted file mode 100644 index 327f399..0000000 --- a/node_modules/fastq/index.d.ts +++ /dev/null @@ -1,38 +0,0 @@ -declare function fastq(context: C, worker: fastq.worker, concurrency: number): fastq.queue -declare function fastq(worker: fastq.worker, concurrency: number): fastq.queue - -declare namespace fastq { - type worker = (this: C, task: T, cb: fastq.done) => void - type asyncWorker = (this: C, task: T) => Promise - type done = (err: Error | null, result?: R) => void - type errorHandler = (err: Error, task: T) => void - - interface queue { - push(task: T, done?: done): void - unshift(task: T, done?: done): void - pause(): any - resume(): any - running(): number - idle(): boolean - length(): number - getQueue(): T[] - kill(): any - killAndDrain(): any - error(handler: errorHandler): void - concurrency: number - drain(): any - empty: () => void - saturated: () => void - } - - interface queueAsPromised extends queue { - push(task: T): Promise - unshift(task: T): Promise - drained(): Promise - } - - function promise(context: C, 
worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised - function promise(worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised -} - -export = fastq diff --git a/node_modules/fastq/package.json b/node_modules/fastq/package.json deleted file mode 100644 index 44655bc..0000000 --- a/node_modules/fastq/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "name": "fastq", - "version": "1.17.1", - "description": "Fast, in memory work queue", - "main": "queue.js", - "scripts": { - "lint": "standard --verbose | snazzy", - "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test/test.js test/promise.js", - "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js test/promise.js", - "test:report": "npm run lint && npm run unit:report", - "test": "npm run lint && npm run unit", - "typescript": "tsc --project ./test/tsconfig.json", - "legacy": "tape test/test.js" - }, - "pre-commit": [ - "test", - "typescript" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/mcollina/fastq.git" - }, - "keywords": [ - "fast", - "queue", - "async", - "worker" - ], - "author": "Matteo Collina ", - "license": "ISC", - "bugs": { - "url": "https://github.com/mcollina/fastq/issues" - }, - "homepage": "https://github.com/mcollina/fastq#readme", - "devDependencies": { - "async": "^3.1.0", - "neo-async": "^2.6.1", - "nyc": "^15.0.0", - "pre-commit": "^1.2.2", - "snazzy": "^9.0.0", - "standard": "^16.0.0", - "tape": "^5.0.0", - "typescript": "^5.0.4" - }, - "dependencies": { - "reusify": "^1.0.4" - }, - "standard": { - "ignore": [ - "example.mjs" - ] - } -} diff --git a/node_modules/fastq/queue.js b/node_modules/fastq/queue.js deleted file mode 100644 index a9d0fa9..0000000 --- a/node_modules/fastq/queue.js +++ /dev/null @@ -1,311 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var reusify = require('reusify') - -function fastqueue (context, worker, _concurrency) { - if (typeof context === 'function') { - _concurrency = worker - worker = context - context = null - } - - if (!(_concurrency >= 1)) { - throw new Error('fastqueue concurrency must be equal to or greater than 1') - } - - var cache = reusify(Task) - var queueHead = null - var queueTail = null - var _running = 0 - var errorHandler = null - - var self = { - push: push, - drain: noop, - saturated: noop, - pause: pause, - paused: false, - - get concurrency () { - return _concurrency - }, - set concurrency (value) { - if (!(value >= 1)) { - throw new Error('fastqueue concurrency must be equal to or greater than 1') - } - _concurrency = value - - if (self.paused) return - for (; queueHead && _running < _concurrency;) { - _running++ - release() - } - }, - - running: running, - resume: resume, - idle: idle, - length: length, - getQueue: getQueue, - unshift: unshift, - empty: noop, - kill: kill, - killAndDrain: killAndDrain, - error: error - } - - return self - - function running () { - return _running - } - - function pause () { - self.paused = true - } - - function length () { - var current = queueHead - var counter = 0 - - while (current) { - current = current.next - counter++ - } - - return counter - } - - function getQueue () { - var current = queueHead - var tasks = [] - - while (current) { - tasks.push(current.value) - current = current.next - } - - return tasks - } - - function resume () { - if (!self.paused) return - self.paused = false - if (queueHead === null) { - _running++ - release() - return - } - for (; queueHead && _running < 
_concurrency;) { - _running++ - release() - } - } - - function idle () { - return _running === 0 && self.length() === 0 - } - - function push (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running >= _concurrency || self.paused) { - if (queueTail) { - queueTail.next = current - queueTail = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function unshift (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running >= _concurrency || self.paused) { - if (queueHead) { - current.next = queueHead - queueHead = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function release (holder) { - if (holder) { - cache.release(holder) - } - var next = queueHead - if (next && _running <= _concurrency) { - if (!self.paused) { - if (queueTail === queueHead) { - queueTail = null - } - queueHead = next.next - next.next = null - worker.call(context, next.value, next.worked) - if (queueTail === null) { - self.empty() - } - } else { - _running-- - } - } else if (--_running === 0) { - self.drain() - } - } - - function kill () { - queueHead = null - queueTail = null - self.drain = noop - } - - function killAndDrain () { - queueHead = null - queueTail = null - self.drain() - self.drain = noop - } - - function error (handler) { - errorHandler = handler - } -} - -function noop () {} - -function Task () { - this.value = null - this.callback = noop - this.next = null - this.release = noop - this.context = null - this.errorHandler = null - - var self = this - - this.worked = function worked (err, result) { - var callback = self.callback - var errorHandler = self.errorHandler - var val = self.value - self.value = null - self.callback = noop - if (self.errorHandler) { - errorHandler(err, val) - } - callback.call(self.context, err, result) - self.release(self) - } -} - -function queueAsPromised (context, worker, _concurrency) { - if (typeof context === 'function') { - _concurrency = worker - worker = context - context = null - } - - function asyncWrapper (arg, cb) { - worker.call(this, arg) - .then(function (res) { - cb(null, res) - }, cb) - } - - var queue = fastqueue(context, asyncWrapper, _concurrency) - - var pushCb = queue.push - var unshiftCb = queue.unshift - - queue.push = push - queue.unshift = unshift - queue.drained = drained - - return queue - - function push (value) { - var p = new Promise(function (resolve, reject) { - pushCb(value, function (err, result) { - if (err) { - reject(err) - return - } - resolve(result) - }) - }) - - // Let's fork the promise chain to - // make the error bubble up to the user but - // not lead to a unhandledRejection - p.catch(noop) - - return p - } - - function unshift (value) { - var p = new Promise(function (resolve, reject) { - unshiftCb(value, function (err, result) { - if (err) { - reject(err) - return - } - resolve(result) - }) - }) - - // Let's fork the promise chain to - // make the error bubble up to the user but - // not lead to a unhandledRejection - p.catch(noop) - - return p - } - - function drained () { - if 
(queue.idle()) { - return new Promise(function (resolve) { - resolve() - }) - } - - var previousDrain = queue.drain - - var p = new Promise(function (resolve) { - queue.drain = function () { - previousDrain() - resolve() - } - }) - - return p - } -} - -module.exports = fastqueue -module.exports.promise = queueAsPromised diff --git a/node_modules/fastq/test/example.ts b/node_modules/fastq/test/example.ts deleted file mode 100644 index a47d441..0000000 --- a/node_modules/fastq/test/example.ts +++ /dev/null @@ -1,83 +0,0 @@ -import * as fastq from '../' -import { promise as queueAsPromised } from '../' - -// Basic example - -const queue = fastq(worker, 1) - -queue.push('world', (err, result) => { - if (err) throw err - console.log('the result is', result) -}) - -queue.push('push without cb') - -queue.concurrency - -queue.drain() - -queue.empty = () => undefined - -console.log('the queue tasks are', queue.getQueue()) - -queue.idle() - -queue.kill() - -queue.killAndDrain() - -queue.length - -queue.pause() - -queue.resume() - -queue.running() - -queue.saturated = () => undefined - -queue.unshift('world', (err, result) => { - if (err) throw err - console.log('the result is', result) -}) - -queue.unshift('unshift without cb') - -function worker(task: any, cb: fastq.done) { - cb(null, 'hello ' + task) -} - -// Generics example - -interface GenericsContext { - base: number; -} - -const genericsQueue = fastq({ base: 6 }, genericsWorker, 1) - -genericsQueue.push(7, (err, done) => { - if (err) throw err - console.log('the result is', done) -}) - -genericsQueue.unshift(7, (err, done) => { - if (err) throw err - console.log('the result is', done) -}) - -function genericsWorker(this: GenericsContext, task: number, cb: fastq.done) { - cb(null, 'the meaning of life is ' + (this.base * task)) -} - -const queue2 = queueAsPromised(asyncWorker, 1) - -async function asyncWorker(task: any) { - return 'hello ' + task -} - -async function run () { - await queue.push(42) - await queue.unshift(42) -} - -run() diff --git a/node_modules/fastq/test/promise.js b/node_modules/fastq/test/promise.js deleted file mode 100644 index fe014ff..0000000 --- a/node_modules/fastq/test/promise.js +++ /dev/null @@ -1,248 +0,0 @@ -'use strict' - -const test = require('tape') -const buildQueue = require('../').promise -const { promisify } = require('util') -const sleep = promisify(setTimeout) -const immediate = promisify(setImmediate) - -test('concurrency', function (t) { - t.plan(2) - t.throws(buildQueue.bind(null, worker, 0)) - t.doesNotThrow(buildQueue.bind(null, worker, 1)) - - async function worker (arg) { - return true - } -}) - -test('worker execution', async function (t) { - const queue = buildQueue(worker, 1) - - const result = await queue.push(42) - - t.equal(result, true, 'result matches') - - async function worker (arg) { - t.equal(arg, 42) - return true - } -}) - -test('limit', async function (t) { - const queue = buildQueue(worker, 1) - - const [res1, res2] = await Promise.all([queue.push(10), queue.push(0)]) - t.equal(res1, 10, 'the result matches') - t.equal(res2, 0, 'the result matches') - - async function worker (arg) { - await sleep(arg) - return arg - } -}) - -test('multiple executions', async function (t) { - const queue = buildQueue(worker, 1) - const toExec = [1, 2, 3, 4, 5] - const expected = ['a', 'b', 'c', 'd', 'e'] - let count = 0 - - await Promise.all(toExec.map(async function (task, i) { - const result = await queue.push(task) - t.equal(result, expected[i], 'the result matches') - })) - - async function 
worker (arg) { - t.equal(arg, toExec[count], 'arg matches') - return expected[count++] - } -}) - -test('drained', async function (t) { - const queue = buildQueue(worker, 2) - - const toExec = new Array(10).fill(10) - let count = 0 - - async function worker (arg) { - await sleep(arg) - count++ - } - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() - - t.equal(count, toExec.length) - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() - - t.equal(count, toExec.length * 2) -}) - -test('drained with exception should not throw', async function (t) { - const queue = buildQueue(worker, 2) - - const toExec = new Array(10).fill(10) - - async function worker () { - throw new Error('foo') - } - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() -}) - -test('drained with drain function', async function (t) { - let drainCalled = false - const queue = buildQueue(worker, 2) - - queue.drain = function () { - drainCalled = true - } - - const toExec = new Array(10).fill(10) - let count = 0 - - async function worker (arg) { - await sleep(arg) - count++ - } - - toExec.forEach(function () { - queue.push() - }) - - await queue.drained() - - t.equal(count, toExec.length) - t.equal(drainCalled, true) -}) - -test('drained while idle should resolve', async function (t) { - const queue = buildQueue(worker, 2) - - async function worker (arg) { - await sleep(arg) - } - - await queue.drained() -}) - -test('drained while idle should not call the drain function', async function (t) { - let drainCalled = false - const queue = buildQueue(worker, 2) - - queue.drain = function () { - drainCalled = true - } - - async function worker (arg) { - await sleep(arg) - } - - await queue.drained() - - t.equal(drainCalled, false) -}) - -test('set this', async function (t) { - t.plan(1) - const that = {} - const queue = buildQueue(that, worker, 1) - - await queue.push(42) - - async function worker (arg) { - t.equal(this, that, 'this matches') - } -}) - -test('unshift', async function (t) { - const queue = buildQueue(worker, 1) - const expected = [1, 2, 3, 4] - - await Promise.all([ - queue.push(1), - queue.push(4), - queue.unshift(3), - queue.unshift(2) - ]) - - t.is(expected.length, 0) - - async function worker (arg) { - t.equal(expected.shift(), arg, 'tasks come in order') - } -}) - -test('push with worker throwing error', async function (t) { - t.plan(5) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - try { - await q.push(42) - } catch (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - } -}) - -test('unshift with worker throwing error', async function (t) { - t.plan(2) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - try { - await q.unshift(42) - } catch (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - } -}) - -test('no unhandledRejection (push)', async function (t) { - function handleRejection () { - t.fail('unhandledRejection') - } - process.once('unhandledRejection', handleRejection) - const q = 
buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - - q.push(42) - - await immediate() - process.removeListener('unhandledRejection', handleRejection) -}) - -test('no unhandledRejection (unshift)', async function (t) { - function handleRejection () { - t.fail('unhandledRejection') - } - process.once('unhandledRejection', handleRejection) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - - q.unshift(42) - - await immediate() - process.removeListener('unhandledRejection', handleRejection) -}) diff --git a/node_modules/fastq/test/test.js b/node_modules/fastq/test/test.js deleted file mode 100644 index ceed7a7..0000000 --- a/node_modules/fastq/test/test.js +++ /dev/null @@ -1,642 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var test = require('tape') -var buildQueue = require('../') - -test('concurrency', function (t) { - t.plan(6) - t.throws(buildQueue.bind(null, worker, 0)) - t.throws(buildQueue.bind(null, worker, NaN)) - t.doesNotThrow(buildQueue.bind(null, worker, 1)) - - var queue = buildQueue(worker, 1) - t.throws(function () { - queue.concurrency = 0 - }) - t.throws(function () { - queue.concurrency = NaN - }) - t.doesNotThrow(function () { - queue.concurrency = 2 - }) - - function worker (arg, cb) { - cb(null, true) - } -}) - -test('worker execution', function (t) { - t.plan(3) - - var queue = buildQueue(worker, 1) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - function worker (arg, cb) { - t.equal(arg, 42) - cb(null, true) - } -}) - -test('limit', function (t) { - t.plan(4) - - var expected = [10, 0] - var queue = buildQueue(worker, 1) - - queue.push(10, result) - queue.push(0, result) - - function result (err, arg) { - t.error(err, 'no error') - t.equal(arg, expected.shift(), 'the result matches') - } - - function worker (arg, cb) { - setTimeout(cb, arg, null, arg) - } -}) - -test('multiple executions', function (t) { - t.plan(15) - - var queue = buildQueue(worker, 1) - var toExec = [1, 2, 3, 4, 5] - var count = 0 - - toExec.forEach(function (task) { - queue.push(task, done) - }) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, toExec[count - 1], 'the result matches') - } - - function worker (arg, cb) { - t.equal(arg, toExec[count], 'arg matches') - count++ - setImmediate(cb, null, arg) - } -}) - -test('multiple executions, one after another', function (t) { - t.plan(15) - - var queue = buildQueue(worker, 1) - var toExec = [1, 2, 3, 4, 5] - var count = 0 - - queue.push(toExec[0], done) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, toExec[count - 1], 'the result matches') - if (count < toExec.length) { - queue.push(toExec[count], done) - } - } - - function worker (arg, cb) { - t.equal(arg, toExec[count], 'arg matches') - count++ - setImmediate(cb, null, arg) - } -}) - -test('set this', function (t) { - t.plan(3) - - var that = {} - var queue = buildQueue(that, worker, 1) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(this, that, 'this matches') - }) - - function worker (arg, cb) { - t.equal(this, that, 'this matches') - cb(null, true) - } -}) - -test('drain', function (t) { - t.plan(4) - - var queue = buildQueue(worker, 1) - var worked = false - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - queue.drain = function () { - t.equal(true, worked, 'drained') - } - - 
function worker (arg, cb) { - t.equal(arg, 42) - worked = true - setImmediate(cb, null, true) - } -}) - -test('pause && resume', function (t) { - t.plan(13) - - var queue = buildQueue(worker, 1) - var worked = false - var expected = [42, 24] - - t.notOk(queue.paused, 'it should not be paused') - - queue.pause() - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - queue.push(24, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - t.notOk(worked, 'it should be paused') - t.ok(queue.paused, 'it should be paused') - - queue.resume() - queue.pause() - queue.resume() - queue.resume() // second resume is a no-op - - function worker (arg, cb) { - t.notOk(queue.paused, 'it should not be paused') - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - t.equal(arg, expected.shift()) - worked = true - process.nextTick(function () { cb(null, true) }) - } -}) - -test('pause in flight && resume', function (t) { - t.plan(16) - - var queue = buildQueue(worker, 1) - var expected = [42, 24, 12] - - t.notOk(queue.paused, 'it should not be paused') - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.ok(queue.paused, 'it should be paused') - process.nextTick(function () { - queue.resume() - queue.pause() - queue.resume() - }) - }) - - queue.push(24, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.paused, 'it should not be paused') - }) - - queue.push(12, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.paused, 'it should not be paused') - }) - - queue.pause() - - function worker (arg, cb) { - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - t.equal(arg, expected.shift()) - process.nextTick(function () { cb(null, true) }) - } -}) - -test('altering concurrency', function (t) { - t.plan(24) - - var queue = buildQueue(worker, 1) - - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - - queue.pause() - - queue.concurrency = 3 // concurrency changes are ignored while paused - queue.concurrency = 2 - - queue.resume() - - t.equal(queue.running(), 2, '2 jobs running') - - queue.concurrency = 3 - - t.equal(queue.running(), 3, '3 jobs running') - - queue.concurrency = 1 - - t.equal(queue.running(), 3, '3 jobs running') // running jobs can't be killed - - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - - function workDone (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - } - - function worker (arg, cb) { - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('idle()', function (t) { - t.plan(12) - - var queue = buildQueue(worker, 1) - - t.ok(queue.idle(), 'queue is idle') - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.idle(), 'queue is not idle') - }) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - // it will go idle after executing this function - setImmediate(function () { - t.ok(queue.idle(), 'queue is now idle') - }) - }) - - t.notOk(queue.idle(), 'queue is not idle') - - function worker (arg, cb) { - 
t.notOk(queue.idle(), 'queue is not idle') - t.equal(arg, 42) - setImmediate(cb, null, true) - } -}) - -test('saturated', function (t) { - t.plan(9) - - var queue = buildQueue(worker, 1) - var preworked = 0 - var worked = 0 - - queue.saturated = function () { - t.pass('saturated') - t.equal(preworked, 1, 'started 1 task') - t.equal(worked, 0, 'worked zero task') - } - - queue.push(42, done) - queue.push(42, done) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - } - - function worker (arg, cb) { - t.equal(arg, 42) - preworked++ - setImmediate(function () { - worked++ - cb(null, true) - }) - } -}) - -test('length', function (t) { - t.plan(7) - - var queue = buildQueue(worker, 1) - - t.equal(queue.length(), 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.length(), 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.length(), 1, 'one task waiting') - queue.push(42, done) - t.equal(queue.length(), 2, 'two tasks waiting') - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('getQueue', function (t) { - t.plan(10) - - var queue = buildQueue(worker, 1) - - t.equal(queue.getQueue().length, 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.getQueue().length, 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.getQueue().length, 1, 'one task waiting') - t.equal(queue.getQueue()[0], 42, 'should be equal') - queue.push(43, done) - t.equal(queue.getQueue().length, 2, 'two tasks waiting') - t.equal(queue.getQueue()[0], 42, 'should be equal') - t.equal(queue.getQueue()[1], 43, 'should be equal') - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('unshift', function (t) { - t.plan(8) - - var queue = buildQueue(worker, 1) - var expected = [1, 2, 3, 4] - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('unshift && empty', function (t) { - t.plan(2) - - var queue = buildQueue(worker, 1) - var completed = false - - queue.pause() - - queue.empty = function () { - t.notOk(completed, 'the task has not completed yet') - } - - queue.unshift(1, done) - - queue.resume() - - function done (err, result) { - completed = true - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('push && empty', function (t) { - t.plan(2) - - var queue = buildQueue(worker, 1) - var completed = false - - queue.pause() - - queue.empty = function () { - t.notOk(completed, 'the task has not completed yet') - } - - queue.push(1, done) - - queue.resume() - - function done (err, result) { - completed = true - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('kill', function (t) { - t.plan(5) - - var queue = buildQueue(worker, 1) - var expected = [1] - - var predrain = queue.drain - - queue.drain = function drain () { - t.fail('drain should never be called') - } - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - queue.kill() - - function done (err, result) { - 
t.error(err, 'no error') - setImmediate(function () { - t.equal(queue.length(), 0, 'no queued tasks') - t.equal(queue.running(), 0, 'no running tasks') - t.equal(queue.drain, predrain, 'drain is back to default') - }) - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('killAndDrain', function (t) { - t.plan(6) - - var queue = buildQueue(worker, 1) - var expected = [1] - - var predrain = queue.drain - - queue.drain = function drain () { - t.pass('drain has been called') - } - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - queue.killAndDrain() - - function done (err, result) { - t.error(err, 'no error') - setImmediate(function () { - t.equal(queue.length(), 0, 'no queued tasks') - t.equal(queue.running(), 0, 'no running tasks') - t.equal(queue.drain, predrain, 'drain is back to default') - }) - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('pause && idle', function (t) { - t.plan(11) - - var queue = buildQueue(worker, 1) - var worked = false - - t.notOk(queue.paused, 'it should not be paused') - t.ok(queue.idle(), 'should be idle') - - queue.pause() - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - t.notOk(worked, 'it should be paused') - t.ok(queue.paused, 'it should be paused') - t.notOk(queue.idle(), 'should not be idle') - - queue.resume() - - t.notOk(queue.paused, 'it should not be paused') - t.notOk(queue.idle(), 'it should not be idle') - - function worker (arg, cb) { - t.equal(arg, 42) - worked = true - process.nextTick(cb.bind(null, null, true)) - process.nextTick(function () { - t.ok(queue.idle(), 'is should be idle') - }) - } -}) - -test('push without cb', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - - queue.push(42) - - function worker (arg, cb) { - t.equal(arg, 42) - cb() - } -}) - -test('unshift without cb', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - - queue.unshift(42) - - function worker (arg, cb) { - t.equal(arg, 42) - cb() - } -}) - -test('push with worker throwing error', function (t) { - t.plan(5) - var q = buildQueue(function (task, cb) { - cb(new Error('test error'), null) - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - q.push(42, function (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - }) -}) - -test('unshift with worker throwing error', function (t) { - t.plan(5) - var q = buildQueue(function (task, cb) { - cb(new Error('test error'), null) - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - q.unshift(42, function (err) { - t.ok(err instanceof Error, 'unshift callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - }) -}) - -test('pause/resume should trigger drain event', function (t) { - t.plan(1) - - var queue = 
buildQueue(worker, 1) - queue.pause() - queue.drain = function () { - t.pass('drain should be called') - } - - function worker (arg, cb) { - cb(null, true) - } - - queue.resume() -}) diff --git a/node_modules/fastq/test/tsconfig.json b/node_modules/fastq/test/tsconfig.json deleted file mode 100644 index 66e16e9..0000000 --- a/node_modules/fastq/test/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", - "module": "commonjs", - "noEmit": true, - "strict": true - }, - "files": [ - "./example.ts" - ] -} diff --git a/node_modules/fill-range/LICENSE b/node_modules/fill-range/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/fill-range/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/fill-range/README.md b/node_modules/fill-range/README.md deleted file mode 100644 index 8d756fe..0000000 --- a/node_modules/fill-range/README.md +++ /dev/null @@ -1,237 +0,0 @@ -# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range) - -> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex` - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save fill-range -``` - -## Usage - -Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_. 
- -```js -const fill = require('fill-range'); -// fill(from, to[, step, options]); - -console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] -console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10 -``` - -**Params** - -* `from`: **{String|Number}** the number or letter to start with -* `to`: **{String|Number}** the number or letter to end with -* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use. -* `options`: **{Object|Function}**: See all available [options](#options) - -## Examples - -By default, an array of values is returned. - -**Alphabetical ranges** - -```js -console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e'] -console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ] -``` - -**Numerical ranges** - -Numbers can be defined as actual numbers or strings. - -```js -console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] -console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ] -``` - -**Negative ranges** - -Numbers can be defined as actual numbers or strings. - -```js -console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ] -console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ] -``` - -**Steps (increments)** - -```js -// numerical ranges with increments -console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ] -console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ] -console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ] - -// alphabetical ranges with increments -console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ] -console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] -console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ] -``` - -## Options - -### options.step - -**Type**: `number` (formatted as a string or number) - -**Default**: `undefined` - -**Description**: The increment to use for the range. Can be used with letters or numbers. - -**Example(s)** - -```js -// numbers -console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ] -console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ] -console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ] - -// letters -console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] -console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ] -console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ] -``` - -### options.strictRanges - -**Type**: `boolean` - -**Default**: `false` - -**Description**: By default, `null` is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges. - -**Example(s)** - -The following are all invalid: - -```js -fill('1.1', '2'); // decimals not supported in ranges -fill('a', '2'); // incompatible range values -fill(1, 10, 'foo'); // invalid "step" argument -``` - -### options.stringify - -**Type**: `boolean` - -**Default**: `undefined` - -**Description**: Cast all returned values to strings. By default, integers are returned as numbers. - -**Example(s)** - -```js -console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] -console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ] -``` - -### options.toRegex - -**Type**: `boolean` - -**Default**: `undefined` - -**Description**: Create a regex-compatible source string, instead of expanding values to an array. 
- -**Example(s)** - -```js -// alphabetical range -console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]' -// alphabetical with step -console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y' -// numerical range -console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100' -// numerical range with zero padding -console.log(fill('000001', '100000', { toRegex: true })); -//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000' -``` - -### options.transform - -**Type**: `function` - -**Default**: `undefined` - -**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_. - -**Example(s)** - -```js -// add zero padding -console.log(fill(1, 5, value => String(value).padStart(4, '0'))); -//=> ['0001', '0002', '0003', '0004', '0005'] -``` - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 116 | [jonschlinkert](https://github.com/jonschlinkert) | -| 4 | [paulmillr](https://github.com/paulmillr) | -| 2 | [realityking](https://github.com/realityking) | -| 2 | [bluelovers](https://github.com/bluelovers) | -| 1 | [edorivai](https://github.com/edorivai) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! - - - - - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/fill-range/index.js b/node_modules/fill-range/index.js deleted file mode 100644 index ddb212e..0000000 --- a/node_modules/fill-range/index.js +++ /dev/null @@ -1,248 +0,0 @@ -/*! - * fill-range - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Licensed under the MIT License. - */ - -'use strict'; - -const util = require('util'); -const toRegexRange = require('to-regex-range'); - -const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); - -const transform = toNumber => { - return value => toNumber === true ? Number(value) : String(value); -}; - -const isValidValue = value => { - return typeof value === 'number' || (typeof value === 'string' && value !== ''); -}; - -const isNumber = num => Number.isInteger(+num); - -const zeros = input => { - let value = `${input}`; - let index = -1; - if (value[0] === '-') value = value.slice(1); - if (value === '0') return false; - while (value[++index] === '0'); - return index > 0; -}; - -const stringify = (start, end, options) => { - if (typeof start === 'string' || typeof end === 'string') { - return true; - } - return options.stringify === true; -}; - -const pad = (input, maxLength, toNumber) => { - if (maxLength > 0) { - let dash = input[0] === '-' ? '-' : ''; - if (dash) input = input.slice(1); - input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); - } - if (toNumber === false) { - return String(input); - } - return input; -}; - -const toMaxLen = (input, maxLength) => { - let negative = input[0] === '-' ? '-' : ''; - if (negative) { - input = input.slice(1); - maxLength--; - } - while (input.length < maxLength) input = '0' + input; - return negative ? ('-' + input) : input; -}; - -const toSequence = (parts, options, maxLen) => { - parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); - parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); - - let prefix = options.capture ? 
'' : '?:'; - let positives = ''; - let negatives = ''; - let result; - - if (parts.positives.length) { - positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|'); - } - - if (parts.negatives.length) { - negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`; - } - - if (positives && negatives) { - result = `${positives}|${negatives}`; - } else { - result = positives || negatives; - } - - if (options.wrap) { - return `(${prefix}${result})`; - } - - return result; -}; - -const toRange = (a, b, isNumbers, options) => { - if (isNumbers) { - return toRegexRange(a, b, { wrap: false, ...options }); - } - - let start = String.fromCharCode(a); - if (a === b) return start; - - let stop = String.fromCharCode(b); - return `[${start}-${stop}]`; -}; - -const toRegex = (start, end, options) => { - if (Array.isArray(start)) { - let wrap = options.wrap === true; - let prefix = options.capture ? '' : '?:'; - return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); - } - return toRegexRange(start, end, options); -}; - -const rangeError = (...args) => { - return new RangeError('Invalid range arguments: ' + util.inspect(...args)); -}; - -const invalidRange = (start, end, options) => { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; -}; - -const invalidStep = (step, options) => { - if (options.strictRanges === true) { - throw new TypeError(`Expected step "${step}" to be a number`); - } - return []; -}; - -const fillNumbers = (start, end, step = 1, options = {}) => { - let a = Number(start); - let b = Number(end); - - if (!Number.isInteger(a) || !Number.isInteger(b)) { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; - } - - // fix negative zero - if (a === 0) a = 0; - if (b === 0) b = 0; - - let descending = a > b; - let startString = String(start); - let endString = String(end); - let stepString = String(step); - step = Math.max(Math.abs(step), 1); - - let padded = zeros(startString) || zeros(endString) || zeros(stepString); - let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; - let toNumber = padded === false && stringify(start, end, options) === false; - let format = options.transform || transform(toNumber); - - if (options.toRegex && step === 1) { - return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); - } - - let parts = { negatives: [], positives: [] }; - let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); - let range = []; - let index = 0; - - while (descending ? a >= b : a <= b) { - if (options.toRegex === true && step > 1) { - push(a); - } else { - range.push(pad(format(a, index), maxLen, toNumber)); - } - a = descending ? a - step : a + step; - index++; - } - - if (options.toRegex === true) { - return step > 1 - ? 
toSequence(parts, options, maxLen) - : toRegex(range, null, { wrap: false, ...options }); - } - - return range; -}; - -const fillLetters = (start, end, step = 1, options = {}) => { - if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { - return invalidRange(start, end, options); - } - - let format = options.transform || (val => String.fromCharCode(val)); - let a = `${start}`.charCodeAt(0); - let b = `${end}`.charCodeAt(0); - - let descending = a > b; - let min = Math.min(a, b); - let max = Math.max(a, b); - - if (options.toRegex && step === 1) { - return toRange(min, max, false, options); - } - - let range = []; - let index = 0; - - while (descending ? a >= b : a <= b) { - range.push(format(a, index)); - a = descending ? a - step : a + step; - index++; - } - - if (options.toRegex === true) { - return toRegex(range, null, { wrap: false, options }); - } - - return range; -}; - -const fill = (start, end, step, options = {}) => { - if (end == null && isValidValue(start)) { - return [start]; - } - - if (!isValidValue(start) || !isValidValue(end)) { - return invalidRange(start, end, options); - } - - if (typeof step === 'function') { - return fill(start, end, 1, { transform: step }); - } - - if (isObject(step)) { - return fill(start, end, 0, step); - } - - let opts = { ...options }; - if (opts.capture === true) opts.wrap = true; - step = step || opts.step || 1; - - if (!isNumber(step)) { - if (step != null && !isObject(step)) return invalidStep(step, opts); - return fill(start, end, 1, step); - } - - if (isNumber(start) && isNumber(end)) { - return fillNumbers(start, end, step, opts); - } - - return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); -}; - -module.exports = fill; diff --git a/node_modules/fill-range/package.json b/node_modules/fill-range/package.json deleted file mode 100644 index 582357f..0000000 --- a/node_modules/fill-range/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "fill-range", - "description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`", - "version": "7.1.1", - "homepage": "https://github.com/jonschlinkert/fill-range", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Edo Rivai (edo.rivai.nl)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Paul Miller (paulmillr.com)", - "Rouven Weßling (www.rouvenwessling.de)", - "(https://github.com/wtgtybhertgeghgtwtg)" - ], - "repository": "jonschlinkert/fill-range", - "bugs": { - "url": "https://github.com/jonschlinkert/fill-range/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8" - }, - "scripts": { - "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", - "mocha": "mocha --reporter dot", - "test": "npm run lint && npm run mocha", - "test:ci": "npm run test:cover", - "test:cover": "nyc npm run mocha" - }, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "devDependencies": { - "gulp-format-md": "^2.0.0", - "mocha": "^6.1.1", - "nyc": "^15.1.0" - }, - "keywords": [ - "alpha", - "alphabetical", - "array", - "bash", - "brace", - "expand", - "expansion", - "fill", - "glob", - "match", - "matches", - "matching", - "number", - "numerical", - "range", - "ranges", - "regex", - "sh" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - 
"gulp-format-md" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/fs-extra/LICENSE b/node_modules/fs-extra/LICENSE deleted file mode 100644 index 93546df..0000000 --- a/node_modules/fs-extra/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fs-extra/README.md b/node_modules/fs-extra/README.md deleted file mode 100644 index 245de66..0000000 --- a/node_modules/fs-extra/README.md +++ /dev/null @@ -1,292 +0,0 @@ -Node.js: fs-extra -================= - -`fs-extra` adds file system methods that aren't included in the native `fs` module and adds promise support to the `fs` methods. It also uses [`graceful-fs`](https://github.com/isaacs/node-graceful-fs) to prevent `EMFILE` errors. It should be a drop in replacement for `fs`. - -[![npm Package](https://img.shields.io/npm/v/fs-extra.svg)](https://www.npmjs.org/package/fs-extra) -[![License](https://img.shields.io/npm/l/fs-extra.svg)](https://github.com/jprichardson/node-fs-extra/blob/master/LICENSE) -[![build status](https://img.shields.io/github/actions/workflow/status/jprichardson/node-fs-extra/ci.yml?branch=master)](https://github.com/jprichardson/node-fs-extra/actions/workflows/ci.yml?query=branch%3Amaster) -[![downloads per month](http://img.shields.io/npm/dm/fs-extra.svg)](https://www.npmjs.org/package/fs-extra) -[![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com) - -Why? ----- - -I got tired of including `mkdirp`, `rimraf`, and `ncp` in most of my projects. - - - - -Installation ------------- - - npm install fs-extra - - - -Usage ------ - -### CommonJS - -`fs-extra` is a drop in replacement for native `fs`. All methods in `fs` are attached to `fs-extra`. All `fs` methods return promises if the callback isn't passed. - -You don't ever need to include the original `fs` module again: - -```js -const fs = require('fs') // this is no longer necessary -``` - -you can now do this: - -```js -const fs = require('fs-extra') -``` - -or if you prefer to make it clear that you're using `fs-extra` and not `fs`, you may want -to name your `fs` variable `fse` like so: - -```js -const fse = require('fs-extra') -``` - -you can also keep both, but it's redundant: - -```js -const fs = require('fs') -const fse = require('fs-extra') -``` - -### ESM - -There is also an `fs-extra/esm` import, that supports both default and named exports. 
However, note that `fs` methods are not included in `fs-extra/esm`; you still need to import `fs` and/or `fs/promises` seperately: - -```js -import { readFileSync } from 'fs' -import { readFile } from 'fs/promises' -import { outputFile, outputFileSync } from 'fs-extra/esm' -``` - -Default exports are supported: - -```js -import fs from 'fs' -import fse from 'fs-extra/esm' -// fse.readFileSync is not a function; must use fs.readFileSync -``` - -but you probably want to just use regular `fs-extra` instead of `fs-extra/esm` for default exports: - -```js -import fs from 'fs-extra' -// both fs and fs-extra methods are defined -``` - -Sync vs Async vs Async/Await -------------- -Most methods are async by default. All async methods will return a promise if the callback isn't passed. - -Sync methods on the other hand will throw if an error occurs. - -Also Async/Await will throw an error if one occurs. - -Example: - -```js -const fs = require('fs-extra') - -// Async with promises: -fs.copy('/tmp/myfile', '/tmp/mynewfile') - .then(() => console.log('success!')) - .catch(err => console.error(err)) - -// Async with callbacks: -fs.copy('/tmp/myfile', '/tmp/mynewfile', err => { - if (err) return console.error(err) - console.log('success!') -}) - -// Sync: -try { - fs.copySync('/tmp/myfile', '/tmp/mynewfile') - console.log('success!') -} catch (err) { - console.error(err) -} - -// Async/Await: -async function copyFiles () { - try { - await fs.copy('/tmp/myfile', '/tmp/mynewfile') - console.log('success!') - } catch (err) { - console.error(err) - } -} - -copyFiles() -``` - - -Methods -------- - -### Async - -- [copy](docs/copy.md) -- [emptyDir](docs/emptyDir.md) -- [ensureFile](docs/ensureFile.md) -- [ensureDir](docs/ensureDir.md) -- [ensureLink](docs/ensureLink.md) -- [ensureSymlink](docs/ensureSymlink.md) -- [mkdirp](docs/ensureDir.md) -- [mkdirs](docs/ensureDir.md) -- [move](docs/move.md) -- [outputFile](docs/outputFile.md) -- [outputJson](docs/outputJson.md) -- [pathExists](docs/pathExists.md) -- [readJson](docs/readJson.md) -- [remove](docs/remove.md) -- [writeJson](docs/writeJson.md) - -### Sync - -- [copySync](docs/copy-sync.md) -- [emptyDirSync](docs/emptyDir-sync.md) -- [ensureFileSync](docs/ensureFile-sync.md) -- [ensureDirSync](docs/ensureDir-sync.md) -- [ensureLinkSync](docs/ensureLink-sync.md) -- [ensureSymlinkSync](docs/ensureSymlink-sync.md) -- [mkdirpSync](docs/ensureDir-sync.md) -- [mkdirsSync](docs/ensureDir-sync.md) -- [moveSync](docs/move-sync.md) -- [outputFileSync](docs/outputFile-sync.md) -- [outputJsonSync](docs/outputJson-sync.md) -- [pathExistsSync](docs/pathExists-sync.md) -- [readJsonSync](docs/readJson-sync.md) -- [removeSync](docs/remove-sync.md) -- [writeJsonSync](docs/writeJson-sync.md) - - -**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()`, `fs.write()`, & `fs.writev()`](docs/fs-read-write-writev.md) - -### What happened to `walk()` and `walkSync()`? - -They were removed from `fs-extra` in v2.0.0. If you need the functionality, `walk` and `walkSync` are available as separate packages, [`klaw`](https://github.com/jprichardson/node-klaw) and [`klaw-sync`](https://github.com/manidlou/node-klaw-sync). - - -Third Party ------------ - -### CLI - -[fse-cli](https://www.npmjs.com/package/@atao60/fse-cli) allows you to run `fs-extra` from a console or from [npm](https://www.npmjs.com) scripts. 
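As a rough sketch of how a few of the methods listed above fit together (the `/tmp/...` paths here are only hypothetical examples, not taken from the fs-extra docs):

```js
const fse = require('fs-extra')

async function example () {
  // create the directory tree if it doesn't exist yet
  await fse.ensureDir('/tmp/my-app')

  // write a JSON file, creating any missing parent directories
  await fse.outputJson('/tmp/my-app/config.json', { port: 8080 })

  // recursive copy, roughly `cp -r`
  await fse.copy('/tmp/my-app', '/tmp/my-app-backup')

  // check that the copy landed before removing the original
  if (await fse.pathExists('/tmp/my-app-backup/config.json')) {
    // recursive removal, roughly `rm -rf`
    await fse.remove('/tmp/my-app')
  }
}

example().catch(console.error)
```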
- -### TypeScript - -If you like TypeScript, you can use `fs-extra` with it: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra - - -### File / Directory Watching - -If you want to watch for changes to files or directories, then you should use [chokidar](https://github.com/paulmillr/chokidar). - -### Obtain Filesystem (Devices, Partitions) Information - -[fs-filesystem](https://github.com/arthurintelligence/node-fs-filesystem) allows you to read the state of the filesystem of the host on which it is run. It returns information about both the devices and the partitions (volumes) of the system. - -### Misc. - -- [fs-extra-debug](https://github.com/jdxcode/fs-extra-debug) - Send your fs-extra calls to [debug](https://npmjs.org/package/debug). -- [mfs](https://github.com/cadorn/mfs) - Monitor your fs-extra calls. - - - -Hacking on fs-extra -------------------- - -Wanna hack on `fs-extra`? Great! Your help is needed! [fs-extra is one of the most depended upon Node.js packages](http://nodei.co/npm/fs-extra.png?downloads=true&downloadRank=true&stars=true). This project -uses [JavaScript Standard Style](https://github.com/feross/standard) - if the name or style choices bother you, -you're gonna have to get over it :) If `standard` is good enough for `npm`, it's good enough for `fs-extra`. - -[![js-standard-style](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) - -What's needed? -- First, take a look at existing issues. Those are probably going to be where the priority lies. -- More tests for edge cases. Specifically on different platforms. There can never be enough tests. -- Improve test coverage. - -Note: If you make any big changes, **you should definitely file an issue for discussion first.** - -### Running the Test Suite - -fs-extra contains hundreds of tests. - -- `npm run lint`: runs the linter ([standard](http://standardjs.com/)) -- `npm run unit`: runs the unit tests -- `npm run unit-esm`: runs tests for `fs-extra/esm` exports -- `npm test`: runs the linter and all tests - -When running unit tests, set the environment variable `CROSS_DEVICE_PATH` to the absolute path of an empty directory on another device (like a thumb drive) to enable cross-device move tests. - - -### Windows - -If you run the tests on the Windows and receive a lot of symbolic link `EPERM` permission errors, it's -because on Windows you need elevated privilege to create symbolic links. You can add this to your Windows's -account by following the instructions here: http://superuser.com/questions/104845/permission-to-make-symbolic-links-in-windows-7 -However, I didn't have much luck doing this. - -Since I develop on Mac OS X, I use VMWare Fusion for Windows testing. I create a shared folder that I map to a drive on Windows. -I open the `Node.js command prompt` and run as `Administrator`. I then map the network drive running the following command: - - net use z: "\\vmware-host\Shared Folders" - -I can then navigate to my `fs-extra` directory and run the tests. - - -Naming ------- - -I put a lot of thought into the naming of these functions. Inspired by @coolaj86's request. So he deserves much of the credit for raising the issue. 
See discussion(s) here: - -* https://github.com/jprichardson/node-fs-extra/issues/2 -* https://github.com/flatiron/utile/issues/11 -* https://github.com/ryanmcgrath/wrench-js/issues/29 -* https://github.com/substack/node-mkdirp/issues/17 - -First, I believe that in as many cases as possible, the [Node.js naming schemes](http://nodejs.org/api/fs.html) should be chosen. However, there are problems with the Node.js own naming schemes. - -For example, `fs.readFile()` and `fs.readdir()`: the **F** is capitalized in *File* and the **d** is not capitalized in *dir*. Perhaps a bit pedantic, but they should still be consistent. Also, Node.js has chosen a lot of POSIX naming schemes, which I believe is great. See: `fs.mkdir()`, `fs.rmdir()`, `fs.chown()`, etc. - -We have a dilemma though. How do you consistently name methods that perform the following POSIX commands: `cp`, `cp -r`, `mkdir -p`, and `rm -rf`? - -My perspective: when in doubt, err on the side of simplicity. A directory is just a hierarchical grouping of directories and files. Consider that for a moment. So when you want to copy it or remove it, in most cases you'll want to copy or remove all of its contents. When you want to create a directory, if the directory that it's suppose to be contained in does not exist, then in most cases you'll want to create that too. - -So, if you want to remove a file or a directory regardless of whether it has contents, just call `fs.remove(path)`. If you want to copy a file or a directory whether it has contents, just call `fs.copy(source, destination)`. If you want to create a directory regardless of whether its parent directories exist, just call `fs.mkdirs(path)` or `fs.mkdirp(path)`. - - -Credit ------- - -`fs-extra` wouldn't be possible without using the modules from the following authors: - -- [Isaac Shlueter](https://github.com/isaacs) -- [Charlie McConnel](https://github.com/avianflu) -- [James Halliday](https://github.com/substack) -- [Andrew Kelley](https://github.com/andrewrk) - - - - -License -------- - -Licensed under MIT - -Copyright (c) 2011-2017 [JP Richardson](https://github.com/jprichardson) - -[1]: http://nodejs.org/docs/latest/api/fs.html - - -[jsonfile]: https://github.com/jprichardson/node-jsonfile diff --git a/node_modules/fs-extra/lib/copy/copy-sync.js b/node_modules/fs-extra/lib/copy/copy-sync.js deleted file mode 100644 index 8bc6011..0000000 --- a/node_modules/fs-extra/lib/copy/copy-sync.js +++ /dev/null @@ -1,161 +0,0 @@ -'use strict' - -const fs = require('graceful-fs') -const path = require('path') -const mkdirsSync = require('../mkdirs').mkdirsSync -const utimesMillisSync = require('../util/utimes').utimesMillisSync -const stat = require('../util/stat') - -function copySync (src, dest, opts) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts = opts || {} - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0002' - ) - } - - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'copy') - if (opts.filter && !opts.filter(src, dest)) return - const destParent = path.dirname(dest) - if (!fs.existsSync(destParent)) mkdirsSync(destParent) - return getStats(destStat, src, dest, opts) -} - -function getStats (destStat, src, dest, opts) { - const statSync = opts.dereference ? fs.statSync : fs.lstatSync - const srcStat = statSync(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - return mayCopyFile(srcStat, src, dest, opts) -} - -function mayCopyFile (srcStat, src, dest, opts) { - if (opts.overwrite) { - fs.unlinkSync(dest) - return copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -function copyFile (srcStat, src, dest, opts) { - fs.copyFileSync(src, dest) - if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) - return setDestMode(dest, srcStat.mode) -} - -function handleTimestamps (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) - return setDestTimestamps(src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -function setDestMode (dest, srcMode) { - return fs.chmodSync(dest, srcMode) -} - -function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = fs.statSync(src) - return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) - return copyDir(src, dest, opts) -} - -function mkDirAndCopy (srcMode, src, dest, opts) { - fs.mkdirSync(dest) - copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -function copyDir (src, dest, opts) { - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) -} - -function copyDirItem (item, src, dest, opts) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - if (opts.filter && !opts.filter(srcItem, destItem)) return - const { destStat } = stat.checkPathsSync(srcItem, destItem, 
'copy', opts) - return getStats(destStat, srcItem, destItem, opts) -} - -function onLink (destStat, src, dest, opts) { - let resolvedSrc = fs.readlinkSync(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlinkSync(resolvedSrc, dest) - } else { - let resolvedDest - try { - resolvedDest = fs.readlinkSync(dest) - } catch (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) - throw err - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // prevent copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - return copyLink(resolvedSrc, dest) - } -} - -function copyLink (resolvedSrc, dest) { - fs.unlinkSync(dest) - return fs.symlinkSync(resolvedSrc, dest) -} - -module.exports = copySync diff --git a/node_modules/fs-extra/lib/copy/copy.js b/node_modules/fs-extra/lib/copy/copy.js deleted file mode 100644 index 6304b02..0000000 --- a/node_modules/fs-extra/lib/copy/copy.js +++ /dev/null @@ -1,177 +0,0 @@ -'use strict' - -const fs = require('../fs') -const path = require('path') -const { mkdirs } = require('../mkdirs') -const { pathExists } = require('../path-exists') -const { utimesMillis } = require('../util/utimes') -const stat = require('../util/stat') - -async function copy (src, dest, opts = {}) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - process.emitWarning( - 'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' + - '\tsee https://github.com/jprichardson/node-fs-extra/issues/269', - 'Warning', 'fs-extra-WARN0001' - ) - } - - const { srcStat, destStat } = await stat.checkPaths(src, dest, 'copy', opts) - - await stat.checkParentPaths(src, srcStat, dest, 'copy') - - const include = await runFilter(src, dest, opts) - - if (!include) return - - // check if the parent of dest exists, and create it if it doesn't exist - const destParent = path.dirname(dest) - const dirExists = await pathExists(destParent) - if (!dirExists) { - await mkdirs(destParent) - } - - await getStatsAndPerformCopy(destStat, src, dest, opts) -} - -async function runFilter (src, dest, opts) { - if (!opts.filter) return true - return opts.filter(src, dest) -} - -async function getStatsAndPerformCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? 
fs.stat : fs.lstat - const srcStat = await statFn(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - - if ( - srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice() - ) return onFile(srcStat, destStat, src, dest, opts) - - if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) - if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`) - if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`) - throw new Error(`Unknown file: ${src}`) -} - -async function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - - if (opts.overwrite) { - await fs.unlink(dest) - return copyFile(srcStat, src, dest, opts) - } - if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -async function copyFile (srcStat, src, dest, opts) { - await fs.copyFile(src, dest) - if (opts.preserveTimestamps) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcStat.mode)) { - await makeFileWritable(dest, srcStat.mode) - } - - // Set timestamps and mode correspondingly - - // Note that The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await fs.stat(src) - await utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime) - } - - return fs.chmod(dest, srcStat.mode) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return fs.chmod(dest, srcMode | 0o200) -} - -async function onDir (srcStat, destStat, src, dest, opts) { - // the dest directory might not exist, create it - if (!destStat) { - await fs.mkdir(dest) - } - - const items = await fs.readdir(src) - - // loop through the files in the current directory to copy everything - await Promise.all(items.map(async item => { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - - // skip the item if it is matches by the filter function - const include = await runFilter(srcItem, destItem, opts) - if (!include) return - - const { destStat } = await stat.checkPaths(srcItem, destItem, 'copy', opts) - - // If the item is a copyable file, `getStatsAndPerformCopy` will copy it - // If the item is a directory, `getStatsAndPerformCopy` will call `onDir` recursively - return getStatsAndPerformCopy(destStat, srcItem, destItem, opts) - })) - - if (!destStat) { - await fs.chmod(dest, srcStat.mode) - } -} - -async function onLink (destStat, src, dest, opts) { - let resolvedSrc = await fs.readlink(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - if (!destStat) { - return fs.symlink(resolvedSrc, dest) - } - - let resolvedDest = null - try { - resolvedDest = await fs.readlink(dest) - } catch (e) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. 
- if (e.code === 'EINVAL' || e.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest) - throw e - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - - // copy the link - await fs.unlink(dest) - return fs.symlink(resolvedSrc, dest) -} - -module.exports = copy diff --git a/node_modules/fs-extra/lib/copy/index.js b/node_modules/fs-extra/lib/copy/index.js deleted file mode 100644 index 2e31d27..0000000 --- a/node_modules/fs-extra/lib/copy/index.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -module.exports = { - copy: u(require('./copy')), - copySync: require('./copy-sync') -} diff --git a/node_modules/fs-extra/lib/empty/index.js b/node_modules/fs-extra/lib/empty/index.js deleted file mode 100644 index b4a2e82..0000000 --- a/node_modules/fs-extra/lib/empty/index.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -const fs = require('../fs') -const path = require('path') -const mkdir = require('../mkdirs') -const remove = require('../remove') - -const emptyDir = u(async function emptyDir (dir) { - let items - try { - items = await fs.readdir(dir) - } catch { - return mkdir.mkdirs(dir) - } - - return Promise.all(items.map(item => remove.remove(path.join(dir, item)))) -}) - -function emptyDirSync (dir) { - let items - try { - items = fs.readdirSync(dir) - } catch { - return mkdir.mkdirsSync(dir) - } - - items.forEach(item => { - item = path.join(dir, item) - remove.removeSync(item) - }) -} - -module.exports = { - emptyDirSync, - emptydirSync: emptyDirSync, - emptyDir, - emptydir: emptyDir -} diff --git a/node_modules/fs-extra/lib/ensure/file.js b/node_modules/fs-extra/lib/ensure/file.js deleted file mode 100644 index a55c2d9..0000000 --- a/node_modules/fs-extra/lib/ensure/file.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -const path = require('path') -const fs = require('../fs') -const mkdir = require('../mkdirs') - -async function createFile (file) { - let stats - try { - stats = await fs.stat(file) - } catch { } - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - - let dirStats = null - try { - dirStats = await fs.stat(dir) - } catch (err) { - // if the directory doesn't exist, make it - if (err.code === 'ENOENT') { - await mkdir.mkdirs(dir) - await fs.writeFile(file, '') - return - } else { - throw err - } - } - - if (dirStats.isDirectory()) { - await fs.writeFile(file, '') - } else { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - await fs.readdir(dir) - } -} - -function createFileSync (file) { - let stats - try { - stats = fs.statSync(file) - } catch { } - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - try { - if (!fs.statSync(dir).isDirectory()) { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdirSync(dir) - } - } catch (err) { - // If the stat call above failed because the directory 
doesn't exist, create it - if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) - else throw err - } - - fs.writeFileSync(file, '') -} - -module.exports = { - createFile: u(createFile), - createFileSync -} diff --git a/node_modules/fs-extra/lib/ensure/index.js b/node_modules/fs-extra/lib/ensure/index.js deleted file mode 100644 index ecbcdd0..0000000 --- a/node_modules/fs-extra/lib/ensure/index.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -const { createFile, createFileSync } = require('./file') -const { createLink, createLinkSync } = require('./link') -const { createSymlink, createSymlinkSync } = require('./symlink') - -module.exports = { - // file - createFile, - createFileSync, - ensureFile: createFile, - ensureFileSync: createFileSync, - // link - createLink, - createLinkSync, - ensureLink: createLink, - ensureLinkSync: createLinkSync, - // symlink - createSymlink, - createSymlinkSync, - ensureSymlink: createSymlink, - ensureSymlinkSync: createSymlinkSync -} diff --git a/node_modules/fs-extra/lib/ensure/link.js b/node_modules/fs-extra/lib/ensure/link.js deleted file mode 100644 index c3d1c69..0000000 --- a/node_modules/fs-extra/lib/ensure/link.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -const path = require('path') -const fs = require('../fs') -const mkdir = require('../mkdirs') -const { pathExists } = require('../path-exists') -const { areIdentical } = require('../util/stat') - -async function createLink (srcpath, dstpath) { - let dstStat - try { - dstStat = await fs.lstat(dstpath) - } catch { - // ignore error - } - - let srcStat - try { - srcStat = await fs.lstat(srcpath) - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - if (dstStat && areIdentical(srcStat, dstStat)) return - - const dir = path.dirname(dstpath) - - const dirExists = await pathExists(dir) - - if (!dirExists) { - await mkdir.mkdirs(dir) - } - - await fs.link(srcpath, dstpath) -} - -function createLinkSync (srcpath, dstpath) { - let dstStat - try { - dstStat = fs.lstatSync(dstpath) - } catch {} - - try { - const srcStat = fs.lstatSync(srcpath) - if (dstStat && areIdentical(srcStat, dstStat)) return - } catch (err) { - err.message = err.message.replace('lstat', 'ensureLink') - throw err - } - - const dir = path.dirname(dstpath) - const dirExists = fs.existsSync(dir) - if (dirExists) return fs.linkSync(srcpath, dstpath) - mkdir.mkdirsSync(dir) - - return fs.linkSync(srcpath, dstpath) -} - -module.exports = { - createLink: u(createLink), - createLinkSync -} diff --git a/node_modules/fs-extra/lib/ensure/symlink-paths.js b/node_modules/fs-extra/lib/ensure/symlink-paths.js deleted file mode 100644 index 85dda47..0000000 --- a/node_modules/fs-extra/lib/ensure/symlink-paths.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict' - -const path = require('path') -const fs = require('../fs') -const { pathExists } = require('../path-exists') - -const u = require('universalify').fromPromise - -/** - * Function that returns two types of paths, one relative to symlink, and one - * relative to the current working directory. Checks if path is absolute or - * relative. If the path is relative, this function checks if the path is - * relative to symlink or relative to current working directory. This is an - * initiative to find a smarter `srcpath` to supply when building symlinks. - * This allows you to determine which path to use out of one of three possible - * types of source paths. The first is an absolute path. 
This is detected by - * `path.isAbsolute()`. When an absolute path is provided, it is checked to - * see if it exists. If it does it's used, if not an error is returned - * (callback)/ thrown (sync). The other two options for `srcpath` are a - * relative url. By default Node's `fs.symlink` works by creating a symlink - * using `dstpath` and expects the `srcpath` to be relative to the newly - * created symlink. If you provide a `srcpath` that does not exist on the file - * system it results in a broken symlink. To minimize this, the function - * checks to see if the 'relative to symlink' source file exists, and if it - * does it will use it. If it does not, it checks if there's a file that - * exists that is relative to the current working directory, if does its used. - * This preserves the expectations of the original fs.symlink spec and adds - * the ability to pass in `relative to current working direcotry` paths. - */ - -async function symlinkPaths (srcpath, dstpath) { - if (path.isAbsolute(srcpath)) { - try { - await fs.lstat(srcpath) - } catch (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - throw err - } - - return { - toCwd: srcpath, - toDst: srcpath - } - } - - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - - const exists = await pathExists(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } - - try { - await fs.lstat(srcpath) - } catch (err) { - err.message = err.message.replace('lstat', 'ensureSymlink') - throw err - } - - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } -} - -function symlinkPathsSync (srcpath, dstpath) { - if (path.isAbsolute(srcpath)) { - const exists = fs.existsSync(srcpath) - if (!exists) throw new Error('absolute srcpath does not exist') - return { - toCwd: srcpath, - toDst: srcpath - } - } - - const dstdir = path.dirname(dstpath) - const relativeToDst = path.join(dstdir, srcpath) - const exists = fs.existsSync(relativeToDst) - if (exists) { - return { - toCwd: relativeToDst, - toDst: srcpath - } - } - - const srcExists = fs.existsSync(srcpath) - if (!srcExists) throw new Error('relative srcpath does not exist') - return { - toCwd: srcpath, - toDst: path.relative(dstdir, srcpath) - } -} - -module.exports = { - symlinkPaths: u(symlinkPaths), - symlinkPathsSync -} diff --git a/node_modules/fs-extra/lib/ensure/symlink-type.js b/node_modules/fs-extra/lib/ensure/symlink-type.js deleted file mode 100644 index 518558e..0000000 --- a/node_modules/fs-extra/lib/ensure/symlink-type.js +++ /dev/null @@ -1,34 +0,0 @@ -'use strict' - -const fs = require('../fs') -const u = require('universalify').fromPromise - -async function symlinkType (srcpath, type) { - if (type) return type - - let stats - try { - stats = await fs.lstat(srcpath) - } catch { - return 'file' - } - - return (stats && stats.isDirectory()) ? 'dir' : 'file' -} - -function symlinkTypeSync (srcpath, type) { - if (type) return type - - let stats - try { - stats = fs.lstatSync(srcpath) - } catch { - return 'file' - } - return (stats && stats.isDirectory()) ? 
'dir' : 'file' -} - -module.exports = { - symlinkType: u(symlinkType), - symlinkTypeSync -} diff --git a/node_modules/fs-extra/lib/ensure/symlink.js b/node_modules/fs-extra/lib/ensure/symlink.js deleted file mode 100644 index a3d5f57..0000000 --- a/node_modules/fs-extra/lib/ensure/symlink.js +++ /dev/null @@ -1,67 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -const path = require('path') -const fs = require('../fs') - -const { mkdirs, mkdirsSync } = require('../mkdirs') - -const { symlinkPaths, symlinkPathsSync } = require('./symlink-paths') -const { symlinkType, symlinkTypeSync } = require('./symlink-type') - -const { pathExists } = require('../path-exists') - -const { areIdentical } = require('../util/stat') - -async function createSymlink (srcpath, dstpath, type) { - let stats - try { - stats = await fs.lstat(dstpath) - } catch { } - - if (stats && stats.isSymbolicLink()) { - const [srcStat, dstStat] = await Promise.all([ - fs.stat(srcpath), - fs.stat(dstpath) - ]) - - if (areIdentical(srcStat, dstStat)) return - } - - const relative = await symlinkPaths(srcpath, dstpath) - srcpath = relative.toDst - const toType = await symlinkType(relative.toCwd, type) - const dir = path.dirname(dstpath) - - if (!(await pathExists(dir))) { - await mkdirs(dir) - } - - return fs.symlink(srcpath, dstpath, toType) -} - -function createSymlinkSync (srcpath, dstpath, type) { - let stats - try { - stats = fs.lstatSync(dstpath) - } catch { } - if (stats && stats.isSymbolicLink()) { - const srcStat = fs.statSync(srcpath) - const dstStat = fs.statSync(dstpath) - if (areIdentical(srcStat, dstStat)) return - } - - const relative = symlinkPathsSync(srcpath, dstpath) - srcpath = relative.toDst - type = symlinkTypeSync(relative.toCwd, type) - const dir = path.dirname(dstpath) - const exists = fs.existsSync(dir) - if (exists) return fs.symlinkSync(srcpath, dstpath, type) - mkdirsSync(dir) - return fs.symlinkSync(srcpath, dstpath, type) -} - -module.exports = { - createSymlink: u(createSymlink), - createSymlinkSync -} diff --git a/node_modules/fs-extra/lib/esm.mjs b/node_modules/fs-extra/lib/esm.mjs deleted file mode 100644 index 27b7a1f..0000000 --- a/node_modules/fs-extra/lib/esm.mjs +++ /dev/null @@ -1,68 +0,0 @@ -import _copy from './copy/index.js' -import _empty from './empty/index.js' -import _ensure from './ensure/index.js' -import _json from './json/index.js' -import _mkdirs from './mkdirs/index.js' -import _move from './move/index.js' -import _outputFile from './output-file/index.js' -import _pathExists from './path-exists/index.js' -import _remove from './remove/index.js' - -// NOTE: Only exports fs-extra's functions; fs functions must be imported from "node:fs" or "node:fs/promises" - -export const copy = _copy.copy -export const copySync = _copy.copySync -export const emptyDirSync = _empty.emptyDirSync -export const emptydirSync = _empty.emptydirSync -export const emptyDir = _empty.emptyDir -export const emptydir = _empty.emptydir -export const createFile = _ensure.createFile -export const createFileSync = _ensure.createFileSync -export const ensureFile = _ensure.ensureFile -export const ensureFileSync = _ensure.ensureFileSync -export const createLink = _ensure.createLink -export const createLinkSync = _ensure.createLinkSync -export const ensureLink = _ensure.ensureLink -export const ensureLinkSync = _ensure.ensureLinkSync -export const createSymlink = _ensure.createSymlink -export const createSymlinkSync = _ensure.createSymlinkSync -export const ensureSymlink = 
_ensure.ensureSymlink -export const ensureSymlinkSync = _ensure.ensureSymlinkSync -export const readJson = _json.readJson -export const readJSON = _json.readJSON -export const readJsonSync = _json.readJsonSync -export const readJSONSync = _json.readJSONSync -export const writeJson = _json.writeJson -export const writeJSON = _json.writeJSON -export const writeJsonSync = _json.writeJsonSync -export const writeJSONSync = _json.writeJSONSync -export const outputJson = _json.outputJson -export const outputJSON = _json.outputJSON -export const outputJsonSync = _json.outputJsonSync -export const outputJSONSync = _json.outputJSONSync -export const mkdirs = _mkdirs.mkdirs -export const mkdirsSync = _mkdirs.mkdirsSync -export const mkdirp = _mkdirs.mkdirp -export const mkdirpSync = _mkdirs.mkdirpSync -export const ensureDir = _mkdirs.ensureDir -export const ensureDirSync = _mkdirs.ensureDirSync -export const move = _move.move -export const moveSync = _move.moveSync -export const outputFile = _outputFile.outputFile -export const outputFileSync = _outputFile.outputFileSync -export const pathExists = _pathExists.pathExists -export const pathExistsSync = _pathExists.pathExistsSync -export const remove = _remove.remove -export const removeSync = _remove.removeSync - -export default { - ..._copy, - ..._empty, - ..._ensure, - ..._json, - ..._mkdirs, - ..._move, - ..._outputFile, - ..._pathExists, - ..._remove -} diff --git a/node_modules/fs-extra/lib/fs/index.js b/node_modules/fs-extra/lib/fs/index.js deleted file mode 100644 index 3c3ec51..0000000 --- a/node_modules/fs-extra/lib/fs/index.js +++ /dev/null @@ -1,140 +0,0 @@ -'use strict' -// This is adapted from https://github.com/normalize/mz -// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors -const u = require('universalify').fromCallback -const fs = require('graceful-fs') - -const api = [ - 'access', - 'appendFile', - 'chmod', - 'chown', - 'close', - 'copyFile', - 'fchmod', - 'fchown', - 'fdatasync', - 'fstat', - 'fsync', - 'ftruncate', - 'futimes', - 'lchmod', - 'lchown', - 'link', - 'lstat', - 'mkdir', - 'mkdtemp', - 'open', - 'opendir', - 'readdir', - 'readFile', - 'readlink', - 'realpath', - 'rename', - 'rm', - 'rmdir', - 'stat', - 'symlink', - 'truncate', - 'unlink', - 'utimes', - 'writeFile' -].filter(key => { - // Some commands are not available on some systems. 
Ex: - // fs.cp was added in Node.js v16.7.0 - // fs.lchown is not available on at least some Linux - return typeof fs[key] === 'function' -}) - -// Export cloned fs: -Object.assign(exports, fs) - -// Universalify async methods: -api.forEach(method => { - exports[method] = u(fs[method]) -}) - -// We differ from mz/fs in that we still ship the old, broken, fs.exists() -// since we are a drop-in replacement for the native module -exports.exists = function (filename, callback) { - if (typeof callback === 'function') { - return fs.exists(filename, callback) - } - return new Promise(resolve => { - return fs.exists(filename, resolve) - }) -} - -// fs.read(), fs.write(), fs.readv(), & fs.writev() need special treatment due to multiple callback args - -exports.read = function (fd, buffer, offset, length, position, callback) { - if (typeof callback === 'function') { - return fs.read(fd, buffer, offset, length, position, callback) - } - return new Promise((resolve, reject) => { - fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => { - if (err) return reject(err) - resolve({ bytesRead, buffer }) - }) - }) -} - -// Function signature can be -// fs.write(fd, buffer[, offset[, length[, position]]], callback) -// OR -// fs.write(fd, string[, position[, encoding]], callback) -// We need to handle both cases, so we use ...args -exports.write = function (fd, buffer, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.write(fd, buffer, ...args) - } - - return new Promise((resolve, reject) => { - fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => { - if (err) return reject(err) - resolve({ bytesWritten, buffer }) - }) - }) -} - -// Function signature is -// s.readv(fd, buffers[, position], callback) -// We need to handle the optional arg, so we use ...args -exports.readv = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.readv(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.readv(fd, buffers, ...args, (err, bytesRead, buffers) => { - if (err) return reject(err) - resolve({ bytesRead, buffers }) - }) - }) -} - -// Function signature is -// s.writev(fd, buffers[, position], callback) -// We need to handle the optional arg, so we use ...args -exports.writev = function (fd, buffers, ...args) { - if (typeof args[args.length - 1] === 'function') { - return fs.writev(fd, buffers, ...args) - } - - return new Promise((resolve, reject) => { - fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => { - if (err) return reject(err) - resolve({ bytesWritten, buffers }) - }) - }) -} - -// fs.realpath.native sometimes not available if fs is monkey-patched -if (typeof fs.realpath.native === 'function') { - exports.realpath.native = u(fs.realpath.native) -} else { - process.emitWarning( - 'fs.realpath.native is not a function. 
Is fs being monkey-patched?', - 'Warning', 'fs-extra-WARN0003' - ) -} diff --git a/node_modules/fs-extra/lib/index.js b/node_modules/fs-extra/lib/index.js deleted file mode 100644 index da6711a..0000000 --- a/node_modules/fs-extra/lib/index.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -module.exports = { - // Export promiseified graceful-fs: - ...require('./fs'), - // Export extra methods: - ...require('./copy'), - ...require('./empty'), - ...require('./ensure'), - ...require('./json'), - ...require('./mkdirs'), - ...require('./move'), - ...require('./output-file'), - ...require('./path-exists'), - ...require('./remove') -} diff --git a/node_modules/fs-extra/lib/json/index.js b/node_modules/fs-extra/lib/json/index.js deleted file mode 100644 index 900126a..0000000 --- a/node_modules/fs-extra/lib/json/index.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -const jsonFile = require('./jsonfile') - -jsonFile.outputJson = u(require('./output-json')) -jsonFile.outputJsonSync = require('./output-json-sync') -// aliases -jsonFile.outputJSON = jsonFile.outputJson -jsonFile.outputJSONSync = jsonFile.outputJsonSync -jsonFile.writeJSON = jsonFile.writeJson -jsonFile.writeJSONSync = jsonFile.writeJsonSync -jsonFile.readJSON = jsonFile.readJson -jsonFile.readJSONSync = jsonFile.readJsonSync - -module.exports = jsonFile diff --git a/node_modules/fs-extra/lib/json/jsonfile.js b/node_modules/fs-extra/lib/json/jsonfile.js deleted file mode 100644 index f11d34d..0000000 --- a/node_modules/fs-extra/lib/json/jsonfile.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -const jsonFile = require('jsonfile') - -module.exports = { - // jsonfile exports - readJson: jsonFile.readFile, - readJsonSync: jsonFile.readFileSync, - writeJson: jsonFile.writeFile, - writeJsonSync: jsonFile.writeFileSync -} diff --git a/node_modules/fs-extra/lib/json/output-json-sync.js b/node_modules/fs-extra/lib/json/output-json-sync.js deleted file mode 100644 index d4e564f..0000000 --- a/node_modules/fs-extra/lib/json/output-json-sync.js +++ /dev/null @@ -1,12 +0,0 @@ -'use strict' - -const { stringify } = require('jsonfile/utils') -const { outputFileSync } = require('../output-file') - -function outputJsonSync (file, data, options) { - const str = stringify(data, options) - - outputFileSync(file, str, options) -} - -module.exports = outputJsonSync diff --git a/node_modules/fs-extra/lib/json/output-json.js b/node_modules/fs-extra/lib/json/output-json.js deleted file mode 100644 index 0afdeb6..0000000 --- a/node_modules/fs-extra/lib/json/output-json.js +++ /dev/null @@ -1,12 +0,0 @@ -'use strict' - -const { stringify } = require('jsonfile/utils') -const { outputFile } = require('../output-file') - -async function outputJson (file, data, options = {}) { - const str = stringify(data, options) - - await outputFile(file, str, options) -} - -module.exports = outputJson diff --git a/node_modules/fs-extra/lib/mkdirs/index.js b/node_modules/fs-extra/lib/mkdirs/index.js deleted file mode 100644 index 9edecee..0000000 --- a/node_modules/fs-extra/lib/mkdirs/index.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' -const u = require('universalify').fromPromise -const { makeDir: _makeDir, makeDirSync } = require('./make-dir') -const makeDir = u(_makeDir) - -module.exports = { - mkdirs: makeDir, - mkdirsSync: makeDirSync, - // alias - mkdirp: makeDir, - mkdirpSync: makeDirSync, - ensureDir: makeDir, - ensureDirSync: makeDirSync -} diff --git a/node_modules/fs-extra/lib/mkdirs/make-dir.js 
b/node_modules/fs-extra/lib/mkdirs/make-dir.js deleted file mode 100644 index 45ece64..0000000 --- a/node_modules/fs-extra/lib/mkdirs/make-dir.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' -const fs = require('../fs') -const { checkPath } = require('./utils') - -const getMode = options => { - const defaults = { mode: 0o777 } - if (typeof options === 'number') return options - return ({ ...defaults, ...options }).mode -} - -module.exports.makeDir = async (dir, options) => { - checkPath(dir) - - return fs.mkdir(dir, { - mode: getMode(options), - recursive: true - }) -} - -module.exports.makeDirSync = (dir, options) => { - checkPath(dir) - - return fs.mkdirSync(dir, { - mode: getMode(options), - recursive: true - }) -} diff --git a/node_modules/fs-extra/lib/mkdirs/utils.js b/node_modules/fs-extra/lib/mkdirs/utils.js deleted file mode 100644 index a4059ad..0000000 --- a/node_modules/fs-extra/lib/mkdirs/utils.js +++ /dev/null @@ -1,21 +0,0 @@ -// Adapted from https://github.com/sindresorhus/make-dir -// Copyright (c) Sindre Sorhus (sindresorhus.com) -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-'use strict' -const path = require('path') - -// https://github.com/nodejs/node/issues/8987 -// https://github.com/libuv/libuv/pull/1088 -module.exports.checkPath = function checkPath (pth) { - if (process.platform === 'win32') { - const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - - if (pathHasInvalidWinCharacters) { - const error = new Error(`Path contains invalid characters: ${pth}`) - error.code = 'EINVAL' - throw error - } - } -} diff --git a/node_modules/fs-extra/lib/move/index.js b/node_modules/fs-extra/lib/move/index.js deleted file mode 100644 index 5a2f1cc..0000000 --- a/node_modules/fs-extra/lib/move/index.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -module.exports = { - move: u(require('./move')), - moveSync: require('./move-sync') -} diff --git a/node_modules/fs-extra/lib/move/move-sync.js b/node_modules/fs-extra/lib/move/move-sync.js deleted file mode 100644 index c456ab3..0000000 --- a/node_modules/fs-extra/lib/move/move-sync.js +++ /dev/null @@ -1,55 +0,0 @@ -'use strict' - -const fs = require('graceful-fs') -const path = require('path') -const copySync = require('../copy').copySync -const removeSync = require('../remove').removeSync -const mkdirpSync = require('../mkdirs').mkdirpSync -const stat = require('../util/stat') - -function moveSync (src, dest, opts) { - opts = opts || {} - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts) - stat.checkParentPathsSync(src, srcStat, dest, 'move') - if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest)) - return doRename(src, dest, overwrite, isChangingCase) -} - -function isParentRoot (dest) { - const parent = path.dirname(dest) - const parsedPath = path.parse(parent) - return parsedPath.root === parent -} - -function doRename (src, dest, overwrite, isChangingCase) { - if (isChangingCase) return rename(src, dest, overwrite) - if (overwrite) { - removeSync(dest) - return rename(src, dest, overwrite) - } - if (fs.existsSync(dest)) throw new Error('dest already exists.') - return rename(src, dest, overwrite) -} - -function rename (src, dest, overwrite) { - try { - fs.renameSync(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') throw err - return moveAcrossDevice(src, dest, overwrite) - } -} - -function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true, - preserveTimestamps: true - } - copySync(src, dest, opts) - return removeSync(src) -} - -module.exports = moveSync diff --git a/node_modules/fs-extra/lib/move/move.js b/node_modules/fs-extra/lib/move/move.js deleted file mode 100644 index 992bd0f..0000000 --- a/node_modules/fs-extra/lib/move/move.js +++ /dev/null @@ -1,59 +0,0 @@ -'use strict' - -const fs = require('../fs') -const path = require('path') -const { copy } = require('../copy') -const { remove } = require('../remove') -const { mkdirp } = require('../mkdirs') -const { pathExists } = require('../path-exists') -const stat = require('../util/stat') - -async function move (src, dest, opts = {}) { - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat, isChangingCase = false } = await stat.checkPaths(src, dest, 'move', opts) - - await stat.checkParentPaths(src, srcStat, dest, 'move') - - // If the parent of dest is not root, make sure it exists before proceeding - const destParent = path.dirname(dest) - const parsedParentPath = path.parse(destParent) - if 
(parsedParentPath.root !== destParent) { - await mkdirp(destParent) - } - - return doRename(src, dest, overwrite, isChangingCase) -} - -async function doRename (src, dest, overwrite, isChangingCase) { - if (!isChangingCase) { - if (overwrite) { - await remove(dest) - } else if (await pathExists(dest)) { - throw new Error('dest already exists.') - } - } - - try { - // Try w/ rename first, and try copy + remove if EXDEV - await fs.rename(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') { - throw err - } - await moveAcrossDevice(src, dest, overwrite) - } -} - -async function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true, - preserveTimestamps: true - } - - await copy(src, dest, opts) - return remove(src) -} - -module.exports = move diff --git a/node_modules/fs-extra/lib/output-file/index.js b/node_modules/fs-extra/lib/output-file/index.js deleted file mode 100644 index a42d943..0000000 --- a/node_modules/fs-extra/lib/output-file/index.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const u = require('universalify').fromPromise -const fs = require('../fs') -const path = require('path') -const mkdir = require('../mkdirs') -const pathExists = require('../path-exists').pathExists - -async function outputFile (file, data, encoding = 'utf-8') { - const dir = path.dirname(file) - - if (!(await pathExists(dir))) { - await mkdir.mkdirs(dir) - } - - return fs.writeFile(file, data, encoding) -} - -function outputFileSync (file, ...args) { - const dir = path.dirname(file) - if (!fs.existsSync(dir)) { - mkdir.mkdirsSync(dir) - } - - fs.writeFileSync(file, ...args) -} - -module.exports = { - outputFile: u(outputFile), - outputFileSync -} diff --git a/node_modules/fs-extra/lib/path-exists/index.js b/node_modules/fs-extra/lib/path-exists/index.js deleted file mode 100644 index ddd9bc7..0000000 --- a/node_modules/fs-extra/lib/path-exists/index.js +++ /dev/null @@ -1,12 +0,0 @@ -'use strict' -const u = require('universalify').fromPromise -const fs = require('../fs') - -function pathExists (path) { - return fs.access(path).then(() => true).catch(() => false) -} - -module.exports = { - pathExists: u(pathExists), - pathExistsSync: fs.existsSync -} diff --git a/node_modules/fs-extra/lib/remove/index.js b/node_modules/fs-extra/lib/remove/index.js deleted file mode 100644 index da746c7..0000000 --- a/node_modules/fs-extra/lib/remove/index.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' - -const fs = require('graceful-fs') -const u = require('universalify').fromCallback - -function remove (path, callback) { - fs.rm(path, { recursive: true, force: true }, callback) -} - -function removeSync (path) { - fs.rmSync(path, { recursive: true, force: true }) -} - -module.exports = { - remove: u(remove), - removeSync -} diff --git a/node_modules/fs-extra/lib/util/stat.js b/node_modules/fs-extra/lib/util/stat.js deleted file mode 100644 index dfd37d9..0000000 --- a/node_modules/fs-extra/lib/util/stat.js +++ /dev/null @@ -1,158 +0,0 @@ -'use strict' - -const fs = require('../fs') -const path = require('path') -const u = require('universalify').fromPromise - -function getStats (src, dest, opts) { - const statFunc = opts.dereference - ? 
(file) => fs.stat(file, { bigint: true }) - : (file) => fs.lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch(err => { - if (err.code === 'ENOENT') return null - throw err - }) - ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) -} - -function getStatsSync (src, dest, opts) { - let destStat - const statFunc = opts.dereference - ? (file) => fs.statSync(file, { bigint: true }) - : (file) => fs.lstatSync(file, { bigint: true }) - const srcStat = statFunc(src) - try { - destStat = statFunc(dest) - } catch (err) { - if (err.code === 'ENOENT') return { srcStat, destStat: null } - throw err - } - return { srcStat, destStat } -} - -async function checkPaths (src, dest, funcName, opts) { - const { srcStat, destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - - return { srcStat, destStat } -} - -function checkPathsSync (src, dest, funcName, opts) { - const { srcStat, destStat } = getStatsSync(src, dest, opts) - - if (destStat) { - if (areIdentical(srcStat, destStat)) { - const srcBaseName = path.basename(src) - const destBaseName = path.basename(dest) - if (funcName === 'move' && - srcBaseName !== destBaseName && - srcBaseName.toLowerCase() === destBaseName.toLowerCase()) { - return { srcStat, destStat, isChangingCase: true } - } - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - return { srcStat, destStat } -} - -// recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. 
-async function checkParentPaths (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - - let destStat - try { - destStat = await fs.stat(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - - return checkParentPaths(src, srcStat, destParent, funcName) -} - -function checkParentPathsSync (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - let destStat - try { - destStat = fs.statSync(destParent, { bigint: true }) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - return checkParentPathsSync(src, srcStat, destParent, funcName) -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev -} - -// return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = path.resolve(src).split(path.sep).filter(i => i) - const destArr = path.resolve(dest).split(path.sep).filter(i => i) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -function errMsg (src, dest, funcName) { - return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` -} - -module.exports = { - // checkPaths - checkPaths: u(checkPaths), - checkPathsSync, - // checkParent - checkParentPaths: u(checkParentPaths), - checkParentPathsSync, - // Misc - isSrcSubdir, - areIdentical -} diff --git a/node_modules/fs-extra/lib/util/utimes.js b/node_modules/fs-extra/lib/util/utimes.js deleted file mode 100644 index 87f4588..0000000 --- a/node_modules/fs-extra/lib/util/utimes.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict' - -const fs = require('../fs') -const u = require('universalify').fromPromise - -async function utimesMillis (path, atime, mtime) { - // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) - const fd = await fs.open(path, 'r+') - - let closeErr = null - - try { - await fs.futimes(fd, atime, mtime) - } finally { - try { - await fs.close(fd) - } catch (e) { - closeErr = e - } - } - - if (closeErr) { - throw closeErr - } -} - -function utimesMillisSync (path, atime, mtime) { - const fd = fs.openSync(path, 'r+') - fs.futimesSync(fd, atime, mtime) - return fs.closeSync(fd) -} - -module.exports = { - utimesMillis: u(utimesMillis), - utimesMillisSync -} diff --git a/node_modules/fs-extra/package.json b/node_modules/fs-extra/package.json deleted file mode 100644 index f3f6ba3..0000000 --- a/node_modules/fs-extra/package.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "name": "fs-extra", - "version": "11.2.0", - "description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. 
Such as recursive mkdir, copy, and remove.", - "engines": { - "node": ">=14.14" - }, - "homepage": "https://github.com/jprichardson/node-fs-extra", - "repository": { - "type": "git", - "url": "https://github.com/jprichardson/node-fs-extra" - }, - "keywords": [ - "fs", - "file", - "file system", - "copy", - "directory", - "extra", - "mkdirp", - "mkdir", - "mkdirs", - "recursive", - "json", - "read", - "write", - "extra", - "delete", - "remove", - "touch", - "create", - "text", - "output", - "move", - "promise" - ], - "author": "JP Richardson ", - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "devDependencies": { - "klaw": "^2.1.1", - "klaw-sync": "^3.0.2", - "minimist": "^1.1.1", - "mocha": "^10.1.0", - "nyc": "^15.0.0", - "proxyquire": "^2.0.1", - "read-dir-files": "^0.1.1", - "standard": "^17.0.0" - }, - "main": "./lib/index.js", - "exports": { - ".": "./lib/index.js", - "./esm": "./lib/esm.mjs" - }, - "files": [ - "lib/", - "!lib/**/__tests__/" - ], - "scripts": { - "lint": "standard", - "test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha", - "test": "npm run lint && npm run unit && npm run unit-esm", - "unit": "nyc node test.js", - "unit-esm": "node test.mjs" - }, - "sideEffects": false -} diff --git a/node_modules/get-caller-file/LICENSE.md b/node_modules/get-caller-file/LICENSE.md deleted file mode 100644 index bf3e1c0..0000000 --- a/node_modules/get-caller-file/LICENSE.md +++ /dev/null @@ -1,6 +0,0 @@ -ISC License (ISC) -Copyright 2018 Stefan Penner - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/get-caller-file/README.md b/node_modules/get-caller-file/README.md deleted file mode 100644 index a7d8c07..0000000 --- a/node_modules/get-caller-file/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# get-caller-file - -[![Build Status](https://travis-ci.org/stefanpenner/get-caller-file.svg?branch=master)](https://travis-ci.org/stefanpenner/get-caller-file) -[![Build status](https://ci.appveyor.com/api/projects/status/ol2q94g1932cy14a/branch/master?svg=true)](https://ci.appveyor.com/project/embercli/get-caller-file/branch/master) - -This is a utility, which allows a function to figure out from which file it was invoked. It does so by inspecting v8's stack trace at the time it is invoked. 
- -Inspired by http://stackoverflow.com/questions/13227489 - -*note: this relies on Node/V8 specific APIs, as such other runtimes may not work* - -## Installation - -```bash -yarn add get-caller-file -``` - -## Usage - -Given: - -```js -// ./foo.js -const getCallerFile = require('get-caller-file'); - -module.exports = function() { - return getCallerFile(); // figures out who called it -}; -``` - -```js -// index.js -const foo = require('./foo'); - -foo() // => /full/path/to/this/file/index.js -``` - - -## Options: - -* `getCallerFile(position = 2)`: where position is stack frame whos fileName we want. diff --git a/node_modules/get-caller-file/index.d.ts b/node_modules/get-caller-file/index.d.ts deleted file mode 100644 index babed69..0000000 --- a/node_modules/get-caller-file/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -declare const _default: (position?: number) => any; -export = _default; diff --git a/node_modules/get-caller-file/index.js b/node_modules/get-caller-file/index.js deleted file mode 100644 index 57304f8..0000000 --- a/node_modules/get-caller-file/index.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -// Call this function in a another function to find out the file from -// which that function was called from. (Inspects the v8 stack trace) -// -// Inspired by http://stackoverflow.com/questions/13227489 -module.exports = function getCallerFile(position) { - if (position === void 0) { position = 2; } - if (position >= Error.stackTraceLimit) { - throw new TypeError('getCallerFile(position) requires position be less then Error.stackTraceLimit but position was: `' + position + '` and Error.stackTraceLimit was: `' + Error.stackTraceLimit + '`'); - } - var oldPrepareStackTrace = Error.prepareStackTrace; - Error.prepareStackTrace = function (_, stack) { return stack; }; - var stack = new Error().stack; - Error.prepareStackTrace = oldPrepareStackTrace; - if (stack !== null && typeof stack === 'object') { - // stack[0] holds this file - // stack[1] holds where this function was called - // stack[2] holds the file we're interested in - return stack[position] ? 
stack[position].getFileName() : undefined; - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/get-caller-file/index.js.map b/node_modules/get-caller-file/index.js.map deleted file mode 100644 index 89c655c..0000000 --- a/node_modules/get-caller-file/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";AAAA,qEAAqE;AACrE,qEAAqE;AACrE,EAAE;AACF,0DAA0D;AAE1D,iBAAS,SAAS,aAAa,CAAC,QAAY;IAAZ,yBAAA,EAAA,YAAY;IAC1C,IAAI,QAAQ,IAAI,KAAK,CAAC,eAAe,EAAE;QACrC,MAAM,IAAI,SAAS,CAAC,kGAAkG,GAAG,QAAQ,GAAG,oCAAoC,GAAG,KAAK,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC;KACzM;IAED,IAAM,oBAAoB,GAAG,KAAK,CAAC,iBAAiB,CAAC;IACrD,KAAK,CAAC,iBAAiB,GAAG,UAAC,CAAC,EAAE,KAAK,IAAM,OAAA,KAAK,EAAL,CAAK,CAAC;IAC/C,IAAM,KAAK,GAAG,IAAI,KAAK,EAAE,CAAC,KAAK,CAAC;IAChC,KAAK,CAAC,iBAAiB,GAAG,oBAAoB,CAAC;IAG/C,IAAI,KAAK,KAAK,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC/C,2BAA2B;QAC3B,gDAAgD;QAChD,8CAA8C;QAC9C,OAAO,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAE,KAAK,CAAC,QAAQ,CAAS,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;KAC7E;AACH,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/get-caller-file/package.json b/node_modules/get-caller-file/package.json deleted file mode 100644 index b0dd571..0000000 --- a/node_modules/get-caller-file/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "get-caller-file", - "version": "2.0.5", - "description": "", - "main": "index.js", - "directories": { - "test": "tests" - }, - "files": [ - "index.js", - "index.js.map", - "index.d.ts" - ], - "scripts": { - "prepare": "tsc", - "test": "mocha test", - "test:debug": "mocha test" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/stefanpenner/get-caller-file.git" - }, - "author": "Stefan Penner", - "license": "ISC", - "bugs": { - "url": "https://github.com/stefanpenner/get-caller-file/issues" - }, - "homepage": "https://github.com/stefanpenner/get-caller-file#readme", - "devDependencies": { - "@types/chai": "^4.1.7", - "@types/ensure-posix-path": "^1.0.0", - "@types/mocha": "^5.2.6", - "@types/node": "^11.10.5", - "chai": "^4.1.2", - "ensure-posix-path": "^1.0.1", - "mocha": "^5.2.0", - "typescript": "^3.3.3333" - }, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } -} diff --git a/node_modules/get-stdin/index.d.ts b/node_modules/get-stdin/index.d.ts deleted file mode 100644 index c2a0605..0000000 --- a/node_modules/get-stdin/index.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -declare const getStdin: { - /** - Get [`stdin`](https://nodejs.org/api/process.html#process_process_stdin) as a `string`. - - @returns A promise that is resolved when the `end` event fires on the `stdin` stream, indicating that there is no more data to be read. In a TTY context, an empty `string` is returned. - - @example - ``` - // example.ts - import getStdin from 'get-stdin'; - - console.log(await getStdin()); - //=> 'unicorns' - - // $ echo unicorns | ts-node example.ts - // unicorns - ``` - */ - (): Promise; - - /** - Get [`stdin`](https://nodejs.org/api/process.html#process_process_stdin) as a `Buffer`. - - @returns A promise that is resolved when the `end` event fires on the `stdin` stream, indicating that there is no more data to be read. In a TTY context, an empty `Buffer` is returned. 
- */ - buffer(): Promise; -}; - -export default getStdin; diff --git a/node_modules/get-stdin/index.js b/node_modules/get-stdin/index.js deleted file mode 100644 index e8182da..0000000 --- a/node_modules/get-stdin/index.js +++ /dev/null @@ -1,33 +0,0 @@ -const {stdin} = process; - -export default async function getStdin() { - let result = ''; - - if (stdin.isTTY) { - return result; - } - - stdin.setEncoding('utf8'); - - for await (const chunk of stdin) { - result += chunk; - } - - return result; -} - -getStdin.buffer = async () => { - const result = []; - let length = 0; - - if (stdin.isTTY) { - return Buffer.concat([]); - } - - for await (const chunk of stdin) { - result.push(chunk); - length += chunk.length; - } - - return Buffer.concat(result, length); -}; diff --git a/node_modules/get-stdin/license b/node_modules/get-stdin/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/get-stdin/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/get-stdin/package.json b/node_modules/get-stdin/package.json deleted file mode 100644 index bd758aa..0000000 --- a/node_modules/get-stdin/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "get-stdin", - "version": "9.0.0", - "description": "Get stdin as a string or buffer", - "license": "MIT", - "repository": "sindresorhus/get-stdin", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "type": "module", - "exports": "./index.js", - "engines": { - "node": ">=12" - }, - "scripts": { - "test": "xo && ava test.js test-buffer.js && echo unicorns | node test-real.js && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "std", - "stdin", - "stdio", - "concat", - "buffer", - "stream", - "process", - "read" - ], - "devDependencies": { - "@types/node": "^14.14.41", - "ava": "^3.15.0", - "delay": "^5.0.0", - "tsd": "^0.14.0", - "xo": "^0.38.2" - } -} diff --git a/node_modules/get-stdin/readme.md b/node_modules/get-stdin/readme.md deleted file mode 100644 index ede347a..0000000 --- a/node_modules/get-stdin/readme.md +++ /dev/null @@ -1,56 +0,0 @@ -# get-stdin - -> Get [stdin](https://nodejs.org/api/process.html#process_process_stdin) as a string or buffer - -## Install - -``` -$ npm install get-stdin -``` - -## Usage - -```js -// example.js -import getStdin from 'get-stdin'; - -console.log(await getStdin()); -//=> 'unicorns' -``` - -``` -$ echo unicorns | node example.js -unicorns -``` - -## API - -Both methods returns a promise that is resolved when the `end` event fires on the `stdin` stream, indicating that there is no more data to be read. - -### getStdin() - -Get `stdin` as a `string`. - -In a TTY context, a promise that resolves to an empty `string` is returned. - -### getStdin.buffer() - -Get `stdin` as a `Buffer`. - -In a TTY context, a promise that resolves to an empty `Buffer` is returned. - -## Related - -- [get-stream](https://github.com/sindresorhus/get-stream) - Get a stream as a string or buffer - ---- - -
- - Get professional support for this package with a Tidelift subscription - -
- - Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. -
-
diff --git a/node_modules/glob-parent/CHANGELOG.md b/node_modules/glob-parent/CHANGELOG.md deleted file mode 100644 index fb9de96..0000000 --- a/node_modules/glob-parent/CHANGELOG.md +++ /dev/null @@ -1,110 +0,0 @@ -### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) - - -### Bug Fixes - -* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) - -### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) - - -### Bug Fixes - -* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) - -## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) - - -### Features - -* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) - -## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* Drop support for node <6 & bump dependencies - -### Miscellaneous Chores - -* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) - -## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* question marks are valid path characters on Windows so avoid flagging as a glob when alone -* Update is-glob dependency - -### Features - -* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) -* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) -* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) - -## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) - - -### Features - -* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) -* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) -* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) -* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) -* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) - - -### Bug Fixes - -* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) - -### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) - - -### Features - -* use path-dirname ponyfill ([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) - - -### Bug Fixes - -* unescape glob-escaped dirnames 
on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) - -## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* update is-glob dependency - -### Features - -* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) - -## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) - - -### Features - -* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) - -## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) - -## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) - - -### Reverts - -* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) - -## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) - - -### Features - -* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) - -## 1.0.0 (2021-01-27) - diff --git a/node_modules/glob-parent/LICENSE b/node_modules/glob-parent/LICENSE deleted file mode 100644 index 63222d7..0000000 --- a/node_modules/glob-parent/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2015, 2019 Elan Shanker - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/glob-parent/README.md b/node_modules/glob-parent/README.md deleted file mode 100644 index 36a2793..0000000 --- a/node_modules/glob-parent/README.md +++ /dev/null @@ -1,137 +0,0 @@ -

- - - -

- -# glob-parent - -[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] - -Extract the non-magic parent path from a glob string. - -## Usage - -```js -var globParent = require('glob-parent'); - -globParent('path/to/*.js'); // 'path/to' -globParent('/root/path/to/*.js'); // '/root/path/to' -globParent('/*.js'); // '/' -globParent('*.js'); // '.' -globParent('**/*.js'); // '.' -globParent('path/{to,from}'); // 'path' -globParent('path/!(to|from)'); // 'path' -globParent('path/?(to|from)'); // 'path' -globParent('path/+(to|from)'); // 'path' -globParent('path/*(to|from)'); // 'path' -globParent('path/@(to|from)'); // 'path' -globParent('path/**/*'); // 'path' - -// if provided a non-glob path, returns the nearest dir -globParent('path/foo/bar.js'); // 'path/foo' -globParent('path/foo/'); // 'path/foo' -globParent('path/foo'); // 'path' (see issue #3 for details) -``` - -## API - -### `globParent(maybeGlobString, [options])` - -Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. - -#### options - -```js -{ - // Disables the automatic conversion of slashes for Windows - flipBackslashes: true -} -``` - -## Escaping - -The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: - -- `?` (question mark) unless used as a path segment alone -- `*` (asterisk) -- `|` (pipe) -- `(` (opening parenthesis) -- `)` (closing parenthesis) -- `{` (opening curly brace) -- `}` (closing curly brace) -- `[` (opening bracket) -- `]` (closing bracket) - -**Example** - -```js -globParent('foo/[bar]/') // 'foo' -globParent('foo/\\[bar]/') // 'foo/[bar]' -``` - -## Limitations - -### Braces & Brackets -This library attempts a quick and imperfect method of determining which path -parts have glob magic without fully parsing/lexing the pattern. There are some -advanced use cases that can trip it up, such as nested braces where the outer -pair is escaped and the inner one contains a path separator. If you find -yourself in the unlikely circumstance of being affected by this or need to -ensure higher-fidelity glob handling in your library, it is recommended that you -pre-process your input with [expand-braces] and/or [expand-brackets]. - -### Windows -Backslashes are not valid path separators for globs. If a path with backslashes -is provided anyway, for simple cases, glob-parent will replace the path -separator for you and return the non-glob parent path (now with -forward-slashes, which are still valid as Windows path separators). - -This cannot be used in conjunction with escape characters. - -```js -// BAD -globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' - -// GOOD -globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' -``` - -If you are using escape characters for a pattern without path parts (i.e. -relative to `cwd`), prefix with `./` to avoid confusing glob-parent. - -```js -// BAD -globParent('foo \\[bar]') // 'foo ' -globParent('foo \\[bar]*') // 'foo ' - -// GOOD -globParent('./foo \\[bar]') // 'foo [bar]' -globParent('./foo \\[bar]*') // '.' 
-``` - -## License - -ISC - -[expand-braces]: https://github.com/jonschlinkert/expand-braces -[expand-brackets]: https://github.com/jonschlinkert/expand-brackets - -[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg -[npm-url]: https://www.npmjs.com/package/glob-parent -[npm-image]: https://img.shields.io/npm/v/glob-parent.svg - -[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master -[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master - -[travis-url]: https://travis-ci.org/gulpjs/glob-parent -[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci - -[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent -[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor - -[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent -[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg - -[gitter-url]: https://gitter.im/gulpjs/gulp -[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/glob-parent/index.js b/node_modules/glob-parent/index.js deleted file mode 100644 index 09e257e..0000000 --- a/node_modules/glob-parent/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -var isGlob = require('is-glob'); -var pathPosixDirname = require('path').posix.dirname; -var isWin32 = require('os').platform() === 'win32'; - -var slash = '/'; -var backslash = /\\/g; -var enclosure = /[\{\[].*[\}\]]$/; -var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; -var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; - -/** - * @param {string} str - * @param {Object} opts - * @param {boolean} [opts.flipBackslashes=true] - * @returns {string} - */ -module.exports = function globParent(str, opts) { - var options = Object.assign({ flipBackslashes: true }, opts); - - // flip windows path separators - if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { - str = str.replace(backslash, slash); - } - - // special case for strings ending in enclosure containing path separator - if (enclosure.test(str)) { - str += slash; - } - - // preserves full path in case of trailing path separator - str += 'a'; - - // remove path parts that are globby - do { - str = pathPosixDirname(str); - } while (isGlob(str) || globby.test(str)); - - // remove escape chars and return result - return str.replace(escaped, '$1'); -}; diff --git a/node_modules/glob-parent/package.json b/node_modules/glob-parent/package.json deleted file mode 100644 index 125c971..0000000 --- a/node_modules/glob-parent/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "glob-parent", - "version": "5.1.2", - "description": "Extract the non-magic parent path from a glob string.", - "author": "Gulp Team (https://gulpjs.com/)", - "contributors": [ - "Elan Shanker (https://github.com/es128)", - "Blaine Bublitz " - ], - "repository": "gulpjs/glob-parent", - "license": "ISC", - "engines": { - "node": ">= 6" - }, - "main": "index.js", - "files": [ - "LICENSE", - "index.js" - ], - "scripts": { - "lint": "eslint .", - "pretest": "npm run lint", - "test": "nyc mocha --async-only", - "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", - "coveralls": "nyc report --reporter=text-lcov | coveralls" - }, - "dependencies": { - "is-glob": "^4.0.1" - }, - "devDependencies": { - "coveralls": "^3.0.11", - "eslint": "^2.13.1", - "eslint-config-gulp": "^3.0.1", - "expect": "^1.20.2", - "mocha": "^6.0.2", - "nyc": 
"^13.3.0" - }, - "keywords": [ - "glob", - "parent", - "strip", - "path", - "dirname", - "directory", - "base", - "wildcard" - ] -} diff --git a/node_modules/globby/ignore.js b/node_modules/globby/ignore.js deleted file mode 100644 index cf419b6..0000000 --- a/node_modules/globby/ignore.js +++ /dev/null @@ -1,109 +0,0 @@ -import process from 'node:process'; -import fs from 'node:fs'; -import fsPromises from 'node:fs/promises'; -import path from 'node:path'; -import fastGlob from 'fast-glob'; -import gitIgnore from 'ignore'; -import slash from 'slash'; -import {toPath} from 'unicorn-magic'; -import {isNegativePattern} from './utilities.js'; - -const defaultIgnoredDirectories = [ - '**/node_modules', - '**/flow-typed', - '**/coverage', - '**/.git', -]; -const ignoreFilesGlobOptions = { - absolute: true, - dot: true, -}; - -export const GITIGNORE_FILES_PATTERN = '**/.gitignore'; - -const applyBaseToPattern = (pattern, base) => isNegativePattern(pattern) - ? '!' + path.posix.join(base, pattern.slice(1)) - : path.posix.join(base, pattern); - -const parseIgnoreFile = (file, cwd) => { - const base = slash(path.relative(cwd, path.dirname(file.filePath))); - - return file.content - .split(/\r?\n/) - .filter(line => line && !line.startsWith('#')) - .map(pattern => applyBaseToPattern(pattern, base)); -}; - -const toRelativePath = (fileOrDirectory, cwd) => { - cwd = slash(cwd); - if (path.isAbsolute(fileOrDirectory)) { - if (slash(fileOrDirectory).startsWith(cwd)) { - return path.relative(cwd, fileOrDirectory); - } - - throw new Error(`Path ${fileOrDirectory} is not in cwd ${cwd}`); - } - - return fileOrDirectory; -}; - -const getIsIgnoredPredicate = (files, cwd) => { - const patterns = files.flatMap(file => parseIgnoreFile(file, cwd)); - const ignores = gitIgnore().add(patterns); - - return fileOrDirectory => { - fileOrDirectory = toPath(fileOrDirectory); - fileOrDirectory = toRelativePath(fileOrDirectory, cwd); - return fileOrDirectory ? ignores.ignores(slash(fileOrDirectory)) : false; - }; -}; - -const normalizeOptions = (options = {}) => ({ - cwd: toPath(options.cwd) ?? process.cwd(), - suppressErrors: Boolean(options.suppressErrors), - deep: typeof options.deep === 'number' ? options.deep : Number.POSITIVE_INFINITY, - ignore: [...options.ignore ?? 
[], ...defaultIgnoredDirectories], -}); - -export const isIgnoredByIgnoreFiles = async (patterns, options) => { - const {cwd, suppressErrors, deep, ignore} = normalizeOptions(options); - - const paths = await fastGlob(patterns, { - cwd, - suppressErrors, - deep, - ignore, - ...ignoreFilesGlobOptions, - }); - - const files = await Promise.all( - paths.map(async filePath => ({ - filePath, - content: await fsPromises.readFile(filePath, 'utf8'), - })), - ); - - return getIsIgnoredPredicate(files, cwd); -}; - -export const isIgnoredByIgnoreFilesSync = (patterns, options) => { - const {cwd, suppressErrors, deep, ignore} = normalizeOptions(options); - - const paths = fastGlob.sync(patterns, { - cwd, - suppressErrors, - deep, - ignore, - ...ignoreFilesGlobOptions, - }); - - const files = paths.map(filePath => ({ - filePath, - content: fs.readFileSync(filePath, 'utf8'), - })); - - return getIsIgnoredPredicate(files, cwd); -}; - -export const isGitIgnored = options => isIgnoredByIgnoreFiles(GITIGNORE_FILES_PATTERN, options); -export const isGitIgnoredSync = options => isIgnoredByIgnoreFilesSync(GITIGNORE_FILES_PATTERN, options); diff --git a/node_modules/globby/index.d.ts b/node_modules/globby/index.d.ts deleted file mode 100644 index 9552f86..0000000 --- a/node_modules/globby/index.d.ts +++ /dev/null @@ -1,207 +0,0 @@ -import type FastGlob from 'fast-glob'; - -export type GlobEntry = FastGlob.Entry; - -export type GlobTask = { - readonly patterns: string[]; - readonly options: Options; -}; - -export type ExpandDirectoriesOption = - | boolean - | readonly string[] - | {files?: readonly string[]; extensions?: readonly string[]}; - -type FastGlobOptionsWithoutCwd = Omit; - -export type Options = { - /** - If set to `true`, `globby` will automatically glob directories for you. If you define an `Array` it will only glob files that matches the patterns inside the `Array`. You can also define an `Object` with `files` and `extensions` like in the example below. - - Note that if you set this option to `false`, you won't get back matched directories unless you set `onlyFiles: false`. - - @default true - - @example - ``` - import {globby} from 'globby'; - - const paths = await globby('images', { - expandDirectories: { - files: ['cat', 'unicorn', '*.jpg'], - extensions: ['png'] - } - }); - - console.log(paths); - //=> ['cat.png', 'unicorn.png', 'cow.jpg', 'rainbow.jpg'] - ``` - */ - readonly expandDirectories?: ExpandDirectoriesOption; - - /** - Respect ignore patterns in `.gitignore` files that apply to the globbed files. - - @default false - */ - readonly gitignore?: boolean; - - /** - Glob patterns to look for ignore files, which are then used to ignore globbed files. - - This is a more generic form of the `gitignore` option, allowing you to find ignore files with a [compatible syntax](http://git-scm.com/docs/gitignore). For instance, this works with Babel's `.babelignore`, Prettier's `.prettierignore`, or ESLint's `.eslintignore` files. - - @default undefined - */ - readonly ignoreFiles?: string | readonly string[]; - - /** - The current working directory in which to search. - - @default process.cwd() - */ - readonly cwd?: URL | string; -} & FastGlobOptionsWithoutCwd; - -export type GitignoreOptions = { - readonly cwd?: URL | string; -}; - -export type GlobbyFilterFunction = (path: URL | string) => boolean; - -/** -Find files and directories using glob patterns. 
- -Note that glob patterns can only contain forward-slashes, not backward-slashes, so if you want to construct a glob pattern from path components, you need to use `path.posix.join()` instead of `path.join()`. - -@param patterns - See the supported [glob patterns](https://github.com/sindresorhus/globby#globbing-patterns). -@param options - See the [`fast-glob` options](https://github.com/mrmlnc/fast-glob#options-3) in addition to the ones in this package. -@returns The matching paths. - -@example -``` -import {globby} from 'globby'; - -const paths = await globby(['*', '!cake']); - -console.log(paths); -//=> ['unicorn', 'rainbow'] -``` -*/ -export function globby( - patterns: string | readonly string[], - options: Options & {objectMode: true} -): Promise; -export function globby( - patterns: string | readonly string[], - options?: Options -): Promise; - -/** -Find files and directories using glob patterns. - -Note that glob patterns can only contain forward-slashes, not backward-slashes, so if you want to construct a glob pattern from path components, you need to use `path.posix.join()` instead of `path.join()`. - -@param patterns - See the supported [glob patterns](https://github.com/sindresorhus/globby#globbing-patterns). -@param options - See the [`fast-glob` options](https://github.com/mrmlnc/fast-glob#options-3) in addition to the ones in this package. -@returns The matching paths. -*/ -export function globbySync( - patterns: string | readonly string[], - options: Options & {objectMode: true} -): GlobEntry[]; -export function globbySync( - patterns: string | readonly string[], - options?: Options -): string[]; - -/** -Find files and directories using glob patterns. - -Note that glob patterns can only contain forward-slashes, not backward-slashes, so if you want to construct a glob pattern from path components, you need to use `path.posix.join()` instead of `path.join()`. - -@param patterns - See the supported [glob patterns](https://github.com/sindresorhus/globby#globbing-patterns). -@param options - See the [`fast-glob` options](https://github.com/mrmlnc/fast-glob#options-3) in addition to the ones in this package. -@returns The stream of matching paths. - -@example -``` -import {globbyStream} from 'globby'; - -for await (const path of globbyStream('*.tmp')) { - console.log(path); -} -``` -*/ -export function globbyStream( - patterns: string | readonly string[], - options?: Options -): NodeJS.ReadableStream; - -/** -Note that you should avoid running the same tasks multiple times as they contain a file system cache. Instead, run this method each time to ensure file system changes are taken into consideration. - -@param patterns - See the supported [glob patterns](https://github.com/sindresorhus/globby#globbing-patterns). -@param options - See the [`fast-glob` options](https://github.com/mrmlnc/fast-glob#options-3) in addition to the ones in this package. -@returns An object in the format `{pattern: string, options: object}`, which can be passed as arguments to [`fast-glob`](https://github.com/mrmlnc/fast-glob). This is useful for other globbing-related packages. -*/ -export function generateGlobTasks( - patterns: string | readonly string[], - options?: Options -): Promise; - -/** -@see generateGlobTasks - -@returns An object in the format `{pattern: string, options: object}`, which can be passed as arguments to [`fast-glob`](https://github.com/mrmlnc/fast-glob). This is useful for other globbing-related packages. 
-*/ -export function generateGlobTasksSync( - patterns: string | readonly string[], - options?: Options -): GlobTask[]; - -/** -Note that the options affect the results. - -This function is backed by [`fast-glob`](https://github.com/mrmlnc/fast-glob#isdynamicpatternpattern-options). - -@param patterns - See the supported [glob patterns](https://github.com/sindresorhus/globby#globbing-patterns). -@param options - See the [`fast-glob` options](https://github.com/mrmlnc/fast-glob#options-3). -@returns Whether there are any special glob characters in the `patterns`. -*/ -export function isDynamicPattern( - patterns: string | readonly string[], - options?: FastGlobOptionsWithoutCwd & { - /** - The current working directory in which to search. - - @default process.cwd() - */ - readonly cwd?: URL | string; - } -): boolean; - -/** -`.gitignore` files matched by the ignore config are not used for the resulting filter function. - -@returns A filter function indicating whether a given path is ignored via a `.gitignore` file. - -@example -``` -import {isGitIgnored} from 'globby'; - -const isIgnored = await isGitIgnored(); - -console.log(isIgnored('some/file')); -``` -*/ -export function isGitIgnored(options?: GitignoreOptions): Promise; - -/** -@see isGitIgnored - -@returns A filter function indicating whether a given path is ignored via a `.gitignore` file. -*/ -export function isGitIgnoredSync(options?: GitignoreOptions): GlobbyFilterFunction; - -export function convertPathToPattern(source: string): FastGlob.Pattern; diff --git a/node_modules/globby/index.js b/node_modules/globby/index.js deleted file mode 100644 index 1847350..0000000 --- a/node_modules/globby/index.js +++ /dev/null @@ -1,264 +0,0 @@ -import process from 'node:process'; -import fs from 'node:fs'; -import nodePath from 'node:path'; -import mergeStreams from '@sindresorhus/merge-streams'; -import fastGlob from 'fast-glob'; -import {isDirectory, isDirectorySync} from 'path-type'; -import {toPath} from 'unicorn-magic'; -import { - GITIGNORE_FILES_PATTERN, - isIgnoredByIgnoreFiles, - isIgnoredByIgnoreFilesSync, -} from './ignore.js'; -import {isNegativePattern} from './utilities.js'; - -const assertPatternsInput = patterns => { - if (patterns.some(pattern => typeof pattern !== 'string')) { - throw new TypeError('Patterns must be a string or an array of strings'); - } -}; - -const normalizePathForDirectoryGlob = (filePath, cwd) => { - const path = isNegativePattern(filePath) ? filePath.slice(1) : filePath; - return nodePath.isAbsolute(path) ? path : nodePath.join(cwd, path); -}; - -const getDirectoryGlob = ({directoryPath, files, extensions}) => { - const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(',')}}` : extensions[0]}` : ''; - return files - ? files.map(file => nodePath.posix.join(directoryPath, `**/${nodePath.extname(file) ? file : `${file}${extensionGlob}`}`)) - : [nodePath.posix.join(directoryPath, `**${extensionGlob ? `/*${extensionGlob}` : ''}`)]; -}; - -const directoryToGlob = async (directoryPaths, { - cwd = process.cwd(), - files, - extensions, -} = {}) => { - const globs = await Promise.all(directoryPaths.map(async directoryPath => - (await isDirectory(normalizePathForDirectoryGlob(directoryPath, cwd))) ? 
getDirectoryGlob({directoryPath, files, extensions}) : directoryPath), - ); - - return globs.flat(); -}; - -const directoryToGlobSync = (directoryPaths, { - cwd = process.cwd(), - files, - extensions, -} = {}) => directoryPaths.flatMap(directoryPath => isDirectorySync(normalizePathForDirectoryGlob(directoryPath, cwd)) ? getDirectoryGlob({directoryPath, files, extensions}) : directoryPath); - -const toPatternsArray = patterns => { - patterns = [...new Set([patterns].flat())]; - assertPatternsInput(patterns); - return patterns; -}; - -const checkCwdOption = cwd => { - if (!cwd) { - return; - } - - let stat; - try { - stat = fs.statSync(cwd); - } catch { - return; - } - - if (!stat.isDirectory()) { - throw new Error('The `cwd` option must be a path to a directory'); - } -}; - -const normalizeOptions = (options = {}) => { - options = { - ...options, - ignore: options.ignore ?? [], - expandDirectories: options.expandDirectories ?? true, - cwd: toPath(options.cwd), - }; - - checkCwdOption(options.cwd); - - return options; -}; - -const normalizeArguments = function_ => async (patterns, options) => function_(toPatternsArray(patterns), normalizeOptions(options)); -const normalizeArgumentsSync = function_ => (patterns, options) => function_(toPatternsArray(patterns), normalizeOptions(options)); - -const getIgnoreFilesPatterns = options => { - const {ignoreFiles, gitignore} = options; - - const patterns = ignoreFiles ? toPatternsArray(ignoreFiles) : []; - if (gitignore) { - patterns.push(GITIGNORE_FILES_PATTERN); - } - - return patterns; -}; - -const getFilter = async options => { - const ignoreFilesPatterns = getIgnoreFilesPatterns(options); - return createFilterFunction( - ignoreFilesPatterns.length > 0 && await isIgnoredByIgnoreFiles(ignoreFilesPatterns, options), - ); -}; - -const getFilterSync = options => { - const ignoreFilesPatterns = getIgnoreFilesPatterns(options); - return createFilterFunction( - ignoreFilesPatterns.length > 0 && isIgnoredByIgnoreFilesSync(ignoreFilesPatterns, options), - ); -}; - -const createFilterFunction = isIgnored => { - const seen = new Set(); - - return fastGlobResult => { - const pathKey = nodePath.normalize(fastGlobResult.path ?? fastGlobResult); - - if (seen.has(pathKey) || (isIgnored && isIgnored(pathKey))) { - return false; - } - - seen.add(pathKey); - - return true; - }; -}; - -const unionFastGlobResults = (results, filter) => results.flat().filter(fastGlobResult => filter(fastGlobResult)); - -const convertNegativePatterns = (patterns, options) => { - const tasks = []; - - while (patterns.length > 0) { - const index = patterns.findIndex(pattern => isNegativePattern(pattern)); - - if (index === -1) { - tasks.push({patterns, options}); - break; - } - - const ignorePattern = patterns[index].slice(1); - - for (const task of tasks) { - task.options.ignore.push(ignorePattern); - } - - if (index !== 0) { - tasks.push({ - patterns: patterns.slice(0, index), - options: { - ...options, - ignore: [ - ...options.ignore, - ignorePattern, - ], - }, - }); - } - - patterns = patterns.slice(index + 1); - } - - return tasks; -}; - -const normalizeExpandDirectoriesOption = (options, cwd) => ({ - ...(cwd ? {cwd} : {}), - ...(Array.isArray(options) ? 
{files: options} : options), -}); - -const generateTasks = async (patterns, options) => { - const globTasks = convertNegativePatterns(patterns, options); - - const {cwd, expandDirectories} = options; - - if (!expandDirectories) { - return globTasks; - } - - const directoryToGlobOptions = normalizeExpandDirectoriesOption(expandDirectories, cwd); - - return Promise.all( - globTasks.map(async task => { - let {patterns, options} = task; - - [ - patterns, - options.ignore, - ] = await Promise.all([ - directoryToGlob(patterns, directoryToGlobOptions), - directoryToGlob(options.ignore, {cwd}), - ]); - - return {patterns, options}; - }), - ); -}; - -const generateTasksSync = (patterns, options) => { - const globTasks = convertNegativePatterns(patterns, options); - const {cwd, expandDirectories} = options; - - if (!expandDirectories) { - return globTasks; - } - - const directoryToGlobSyncOptions = normalizeExpandDirectoriesOption(expandDirectories, cwd); - - return globTasks.map(task => { - let {patterns, options} = task; - patterns = directoryToGlobSync(patterns, directoryToGlobSyncOptions); - options.ignore = directoryToGlobSync(options.ignore, {cwd}); - return {patterns, options}; - }); -}; - -export const globby = normalizeArguments(async (patterns, options) => { - const [ - tasks, - filter, - ] = await Promise.all([ - generateTasks(patterns, options), - getFilter(options), - ]); - - const results = await Promise.all(tasks.map(task => fastGlob(task.patterns, task.options))); - return unionFastGlobResults(results, filter); -}); - -export const globbySync = normalizeArgumentsSync((patterns, options) => { - const tasks = generateTasksSync(patterns, options); - const filter = getFilterSync(options); - const results = tasks.map(task => fastGlob.sync(task.patterns, task.options)); - return unionFastGlobResults(results, filter); -}); - -export const globbyStream = normalizeArgumentsSync((patterns, options) => { - const tasks = generateTasksSync(patterns, options); - const filter = getFilterSync(options); - const streams = tasks.map(task => fastGlob.stream(task.patterns, task.options)); - const stream = mergeStreams(streams).filter(fastGlobResult => filter(fastGlobResult)); - - // TODO: Make it return a web stream at some point. 
- // return Readable.toWeb(stream); - - return stream; -}); - -export const isDynamicPattern = normalizeArgumentsSync( - (patterns, options) => patterns.some(pattern => fastGlob.isDynamicPattern(pattern, options)), -); - -export const generateGlobTasks = normalizeArguments(generateTasks); -export const generateGlobTasksSync = normalizeArgumentsSync(generateTasksSync); - -export { - isGitIgnored, - isGitIgnoredSync, -} from './ignore.js'; - -export const {convertPathToPattern} = fastGlob; diff --git a/node_modules/globby/license b/node_modules/globby/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/globby/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/globby/package.json b/node_modules/globby/package.json deleted file mode 100644 index 6d2c5ab..0000000 --- a/node_modules/globby/package.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "name": "globby", - "version": "14.0.2", - "description": "User-friendly glob matching", - "license": "MIT", - "repository": "sindresorhus/globby", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "email": "sindresorhus@gmail.com", - "name": "Sindre Sorhus", - "url": "https://sindresorhus.com" - }, - "type": "module", - "exports": { - "types": "./index.d.ts", - "default": "./index.js" - }, - "sideEffects": false, - "engines": { - "node": ">=18" - }, - "scripts": { - "bench": "npm update @globby/main-branch glob-stream fast-glob && node bench.js", - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts", - "ignore.js", - "utilities.js" - ], - "keywords": [ - "all", - "array", - "directories", - "expand", - "files", - "filesystem", - "filter", - "find", - "fnmatch", - "folders", - "fs", - "glob", - "globbing", - "globs", - "gulpfriendly", - "match", - "matcher", - "minimatch", - "multi", - "multiple", - "paths", - "pattern", - "patterns", - "traverse", - "util", - "utility", - "wildcard", - "wildcards", - "promise", - "gitignore", - "git" - ], - "dependencies": { - "@sindresorhus/merge-streams": "^2.1.0", - "fast-glob": "^3.3.2", - "ignore": "^5.2.4", - "path-type": "^5.0.0", - "slash": "^5.1.0", - "unicorn-magic": "^0.1.0" - }, - "devDependencies": { - "@globby/main-branch": "sindresorhus/globby#main", - "@types/node": "^20.9.0", - "ava": "^5.3.1", - "benchmark": "2.1.4", - "glob-stream": "^8.0.0", - "tempy": "^3.1.0", - "tsd": "^0.30.4", - "xo": "^0.57.0" - }, - "xo": { - "ignores": [ - "fixtures" - ] - 
}, - "ava": { - "files": [ - "!tests/utilities.js" - ], - "workerThreads": false - } -} diff --git a/node_modules/globby/readme.md b/node_modules/globby/readme.md deleted file mode 100644 index 71243de..0000000 --- a/node_modules/globby/readme.md +++ /dev/null @@ -1,177 +0,0 @@ -# globby - -> User-friendly glob matching - -Based on [`fast-glob`](https://github.com/mrmlnc/fast-glob) but adds a bunch of useful features. - -## Features - -- Promise API -- Multiple patterns -- Negated patterns: `['foo*', '!foobar']` -- Expands directories: `foo` → `foo/**/*` -- Supports `.gitignore` and similar ignore config files -- Supports `URL` as `cwd` - -## Install - -```sh -npm install globby -``` - -## Usage - -``` -├── unicorn -├── cake -└── rainbow -``` - -```js -import {globby} from 'globby'; - -const paths = await globby(['*', '!cake']); - -console.log(paths); -//=> ['unicorn', 'rainbow'] -``` - -## API - -Note that glob patterns can only contain forward-slashes, not backward-slashes, so if you want to construct a glob pattern from path components, you need to use `path.posix.join()` instead of `path.join()`. - -### globby(patterns, options?) - -Returns a `Promise` of matching paths. - -#### patterns - -Type: `string | string[]` - -See supported `minimatch` [patterns](https://github.com/isaacs/minimatch#usage). - -#### options - -Type: `object` - -See the [`fast-glob` options](https://github.com/mrmlnc/fast-glob#options-3) in addition to the ones below. - -##### expandDirectories - -Type: `boolean | string[] | object`\ -Default: `true` - -If set to `true`, `globby` will automatically glob directories for you. If you define an `Array` it will only glob files that matches the patterns inside the `Array`. You can also define an `object` with `files` and `extensions` like below: - -```js -import {globby} from 'globby'; - -const paths = await globby('images', { - expandDirectories: { - files: ['cat', 'unicorn', '*.jpg'], - extensions: ['png'] - } -}); - -console.log(paths); -//=> ['cat.png', 'unicorn.png', 'cow.jpg', 'rainbow.jpg'] -``` - -Note that if you set this option to `false`, you won't get back matched directories unless you set `onlyFiles: false`. - -##### gitignore - -Type: `boolean`\ -Default: `false` - -Respect ignore patterns in `.gitignore` files that apply to the globbed files. - -##### ignoreFiles - -Type: `string | string[]`\ -Default: `undefined` - -Glob patterns to look for ignore files, which are then used to ignore globbed files. - -This is a more generic form of the `gitignore` option, allowing you to find ignore files with a [compatible syntax](http://git-scm.com/docs/gitignore). For instance, this works with Babel's `.babelignore`, Prettier's `.prettierignore`, or ESLint's `.eslintignore` files. - -### globbySync(patterns, options?) - -Returns `string[]` of matching paths. - -### globbyStream(patterns, options?) - -Returns a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_readable_streams) of matching paths. - -For example, loop over glob matches in a [`for await...of` loop](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) like this: - -```js -import {globbyStream} from 'globby'; - -for await (const path of globbyStream('*.tmp')) { - console.log(path); -} -``` - -### convertPathToPattern(path) - -Convert a path to a pattern. [Learn more.](https://github.com/mrmlnc/fast-glob#convertpathtopatternpath) - -### generateGlobTasks(patterns, options?) 
- -Returns an `Promise` in the format `{patterns: string[], options: Object}`, which can be passed as arguments to [`fast-glob`](https://github.com/mrmlnc/fast-glob). This is useful for other globbing-related packages. - -Note that you should avoid running the same tasks multiple times as they contain a file system cache. Instead, run this method each time to ensure file system changes are taken into consideration. - -### generateGlobTasksSync(patterns, options?) - -Returns an `object[]` in the format `{patterns: string[], options: Object}`, which can be passed as arguments to [`fast-glob`](https://github.com/mrmlnc/fast-glob). This is useful for other globbing-related packages. - -Takes the same arguments as `generateGlobTasks`. - -### isDynamicPattern(patterns, options?) - -Returns a `boolean` of whether there are any special glob characters in the `patterns`. - -Note that the options affect the results. - -This function is backed by [`fast-glob`](https://github.com/mrmlnc/fast-glob#isdynamicpatternpattern-options). - -### isGitIgnored(options?) - -Returns a `Promise<(path: URL | string) => boolean>` indicating whether a given path is ignored via a `.gitignore` file. - -Takes `cwd?: URL | string` as options. - -```js -import {isGitIgnored} from 'globby'; - -const isIgnored = await isGitIgnored(); - -console.log(isIgnored('some/file')); -``` - -### isGitIgnoredSync(options?) - -Returns a `(path: URL | string) => boolean` indicating whether a given path is ignored via a `.gitignore` file. - -Takes `cwd?: URL | string` as options. - -## Globbing patterns - -Just a quick overview. - -- `*` matches any number of characters, but not `/` -- `?` matches a single character, but not `/` -- `**` matches any number of characters, including `/`, as long as it's the only thing in a path part -- `{}` allows for a comma-separated list of "or" expressions -- `!` at the beginning of a pattern will negate the match - -[Various patterns and expected matches.](https://github.com/sindresorhus/multimatch/blob/main/test/test.js) - -## Related - -- [multimatch](https://github.com/sindresorhus/multimatch) - Match against a list instead of the filesystem -- [matcher](https://github.com/sindresorhus/matcher) - Simple wildcard matching -- [del](https://github.com/sindresorhus/del) - Delete files and directories -- [make-dir](https://github.com/sindresorhus/make-dir) - Make a directory and its parents if needed diff --git a/node_modules/globby/utilities.js b/node_modules/globby/utilities.js deleted file mode 100644 index 1ed1f27..0000000 --- a/node_modules/globby/utilities.js +++ /dev/null @@ -1 +0,0 @@ -export const isNegativePattern = pattern => pattern[0] === '!'; diff --git a/node_modules/graceful-fs/LICENSE b/node_modules/graceful-fs/LICENSE deleted file mode 100644 index e906a25..0000000 --- a/node_modules/graceful-fs/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/graceful-fs/README.md b/node_modules/graceful-fs/README.md deleted file mode 100644 index 82d6e4d..0000000 --- a/node_modules/graceful-fs/README.md +++ /dev/null @@ -1,143 +0,0 @@ -# graceful-fs - -graceful-fs functions as a drop-in replacement for the fs module, -making various improvements. - -The improvements are meant to normalize behavior across different -platforms and environments, and to make filesystem access more -resilient to errors. - -## Improvements over [fs module](https://nodejs.org/api/fs.html) - -* Queues up `open` and `readdir` calls, and retries them once - something closes if there is an EMFILE error from too many file - descriptors. -* fixes `lchmod` for Node versions prior to 0.6.2. -* implements `fs.lutimes` if possible. Otherwise it becomes a noop. -* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or - `lchown` if the user isn't root. -* makes `lchmod` and `lchown` become noops, if not available. -* retries reading a file if `read` results in EAGAIN error. - -On Windows, it retries renaming a file for up to one second if `EACCESS` -or `EPERM` error occurs, likely because antivirus software has locked -the directory. - -## USAGE - -```javascript -// use just like fs -var fs = require('graceful-fs') - -// now go and do stuff with it... -fs.readFile('some-file-or-whatever', (err, data) => { - // Do stuff here. -}) -``` - -## Sync methods - -This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync -methods. If you use sync methods which open file descriptors then you are -responsible for dealing with any errors. - -This is a known limitation, not a bug. - -## Global Patching - -If you want to patch the global fs module (or any other fs-like -module) you can do this: - -```javascript -// Make sure to read the caveat below. -var realFs = require('fs') -var gracefulFs = require('graceful-fs') -gracefulFs.gracefulify(realFs) -``` - -This should only ever be done at the top-level application layer, in -order to delay on EMFILE errors from any fs-using dependencies. You -should **not** do this in a library, because it can cause unexpected -delays in other parts of the program. - -## Changes - -This module is fairly stable at this point, and used by a lot of -things. That being said, because it implements a subtle behavior -change in a core part of the node API, even modest changes can be -extremely breaking, and the versioning is thus biased towards -bumping the major when in doubt. - -The main change between major versions has been switching between -providing a fully-patched `fs` module vs monkey-patching the node core -builtin, and the approach by which a non-monkey-patched `fs` was -created. - -The goal is to trade `EMFILE` errors for slower fs operations. So, if -you try to open a zillion files, rather than crashing, `open` -operations will be queued up and wait for something else to `close`. - -There are advantages to each approach. Monkey-patching the fs means -that no `EMFILE` errors can possibly occur anywhere in your -application, because everything is using the same core `fs` module, -which is patched. 
However, it can also obviously cause undesirable -side-effects, especially if the module is loaded multiple times. - -Implementing a separate-but-identical patched `fs` module is more -surgical (and doesn't run the risk of patching multiple times), but -also imposes the challenge of keeping in sync with the core module. - -The current approach loads the `fs` module, and then creates a -lookalike object that has all the same methods, except a few that are -patched. It is safe to use in all versions of Node from 0.8 through -7.0. - -### v4 - -* Do not monkey-patch the fs module. This module may now be used as a - drop-in dep, and users can opt into monkey-patching the fs builtin - if their app requires it. - -### v3 - -* Monkey-patch fs, because the eval approach no longer works on recent - node. -* fixed possible type-error throw if rename fails on windows -* verify that we *never* get EMFILE errors -* Ignore ENOSYS from chmod/chown -* clarify that graceful-fs must be used as a drop-in - -### v2.1.0 - -* Use eval rather than monkey-patching fs. -* readdir: Always sort the results -* win32: requeue a file if error has an OK status - -### v2.0 - -* A return to monkey patching -* wrap process.cwd - -### v1.1 - -* wrap readFile -* Wrap fs.writeFile. -* readdir protection -* Don't clobber the fs builtin -* Handle fs.read EAGAIN errors by trying again -* Expose the curOpen counter -* No-op lchown/lchmod if not implemented -* fs.rename patch only for win32 -* Patch fs.rename to handle AV software on Windows -* Close #4 Chown should not fail on einval or eperm if non-root -* Fix isaacs/fstream#1 Only wrap fs one time -* Fix #3 Start at 1024 max files, then back off on EMFILE -* lutimes that doens't blow up on Linux -* A full on-rewrite using a queue instead of just swallowing the EMFILE error -* Wrap Read/Write streams as well - -### 1.0 - -* Update engines for node 0.6 -* Be lstat-graceful on Windows -* first diff --git a/node_modules/graceful-fs/clone.js b/node_modules/graceful-fs/clone.js deleted file mode 100644 index dff3cc8..0000000 --- a/node_modules/graceful-fs/clone.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict' - -module.exports = clone - -var getPrototypeOf = Object.getPrototypeOf || function (obj) { - return obj.__proto__ -} - -function clone (obj) { - if (obj === null || typeof obj !== 'object') - return obj - - if (obj instanceof Object) - var copy = { __proto__: getPrototypeOf(obj) } - else - var copy = Object.create(null) - - Object.getOwnPropertyNames(obj).forEach(function (key) { - Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) - }) - - return copy -} diff --git a/node_modules/graceful-fs/graceful-fs.js b/node_modules/graceful-fs/graceful-fs.js deleted file mode 100644 index 8d5b89e..0000000 --- a/node_modules/graceful-fs/graceful-fs.js +++ /dev/null @@ -1,448 +0,0 @@ -var fs = require('fs') -var polyfills = require('./polyfills.js') -var legacy = require('./legacy-streams.js') -var clone = require('./clone.js') - -var util = require('util') - -/* istanbul ignore next - node 0.x polyfill */ -var gracefulQueue -var previousSymbol - -/* istanbul ignore else - node 0.x polyfill */ -if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { - gracefulQueue = Symbol.for('graceful-fs.queue') - // This is used in testing by future versions - previousSymbol = Symbol.for('graceful-fs.previous') -} else { - gracefulQueue = '___graceful-fs.queue' - previousSymbol = '___graceful-fs.previous' -} - -function noop () {} - -function publishQueue(context, 
queue) { - Object.defineProperty(context, gracefulQueue, { - get: function() { - return queue - } - }) -} - -var debug = noop -if (util.debuglog) - debug = util.debuglog('gfs4') -else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') - console.error(m) - } - -// Once time initialization -if (!fs[gracefulQueue]) { - // This queue can be shared by multiple loaded instances - var queue = global[gracefulQueue] || [] - publishQueue(fs, queue) - - // Patch fs.close/closeSync to shared queue version, because we need - // to retry() whenever a close happens *anywhere* in the program. - // This is essential when multiple graceful-fs instances are - // in play at the same time. - fs.close = (function (fs$close) { - function close (fd, cb) { - return fs$close.call(fs, fd, function (err) { - // This function uses the graceful-fs shared queue - if (!err) { - resetQueue() - } - - if (typeof cb === 'function') - cb.apply(this, arguments) - }) - } - - Object.defineProperty(close, previousSymbol, { - value: fs$close - }) - return close - })(fs.close) - - fs.closeSync = (function (fs$closeSync) { - function closeSync (fd) { - // This function uses the graceful-fs shared queue - fs$closeSync.apply(fs, arguments) - resetQueue() - } - - Object.defineProperty(closeSync, previousSymbol, { - value: fs$closeSync - }) - return closeSync - })(fs.closeSync) - - if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(fs[gracefulQueue]) - require('assert').equal(fs[gracefulQueue].length, 0) - }) - } -} - -if (!global[gracefulQueue]) { - publishQueue(global, fs[gracefulQueue]); -} - -module.exports = patch(clone(fs)) -if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { - module.exports = patch(fs) - fs.__patched = true; -} - -function patch (fs) { - // Everything that references the open() function needs to be in here - polyfills(fs) - fs.gracefulify = patch - - fs.createReadStream = createReadStream - fs.createWriteStream = createWriteStream - var fs$readFile = fs.readFile - fs.readFile = readFile - function readFile (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$readFile(path, options, cb) - - function go$readFile (path, options, cb, startTime) { - return fs$readFile(path, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$writeFile = fs.writeFile - fs.writeFile = writeFile - function writeFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$writeFile(path, data, options, cb) - - function go$writeFile (path, data, options, cb, startTime) { - return fs$writeFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$appendFile = fs.appendFile - if (fs$appendFile) - fs.appendFile = appendFile - function appendFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$appendFile(path, data, options, cb) - - function 
go$appendFile (path, data, options, cb, startTime) { - return fs$appendFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$copyFile = fs.copyFile - if (fs$copyFile) - fs.copyFile = copyFile - function copyFile (src, dest, flags, cb) { - if (typeof flags === 'function') { - cb = flags - flags = 0 - } - return go$copyFile(src, dest, flags, cb) - - function go$copyFile (src, dest, flags, cb, startTime) { - return fs$copyFile(src, dest, flags, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - var fs$readdir = fs.readdir - fs.readdir = readdir - var noReaddirOptionVersions = /^v[0-5]\./ - function readdir (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - var go$readdir = noReaddirOptionVersions.test(process.version) - ? function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, fs$readdirCallback( - path, options, cb, startTime - )) - } - : function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, options, fs$readdirCallback( - path, options, cb, startTime - )) - } - - return go$readdir(path, options, cb) - - function fs$readdirCallback (path, options, cb, startTime) { - return function (err, files) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([ - go$readdir, - [path, options, cb], - err, - startTime || Date.now(), - Date.now() - ]) - else { - if (files && files.sort) - files.sort() - - if (typeof cb === 'function') - cb.call(this, err, files) - } - } - } - } - - if (process.version.substr(0, 4) === 'v0.8') { - var legStreams = legacy(fs) - ReadStream = legStreams.ReadStream - WriteStream = legStreams.WriteStream - } - - var fs$ReadStream = fs.ReadStream - if (fs$ReadStream) { - ReadStream.prototype = Object.create(fs$ReadStream.prototype) - ReadStream.prototype.open = ReadStream$open - } - - var fs$WriteStream = fs.WriteStream - if (fs$WriteStream) { - WriteStream.prototype = Object.create(fs$WriteStream.prototype) - WriteStream.prototype.open = WriteStream$open - } - - Object.defineProperty(fs, 'ReadStream', { - get: function () { - return ReadStream - }, - set: function (val) { - ReadStream = val - }, - enumerable: true, - configurable: true - }) - Object.defineProperty(fs, 'WriteStream', { - get: function () { - return WriteStream - }, - set: function (val) { - WriteStream = val - }, - enumerable: true, - configurable: true - }) - - // legacy names - var FileReadStream = ReadStream - Object.defineProperty(fs, 'FileReadStream', { - get: function () { - return FileReadStream - }, - set: function (val) { - FileReadStream = val - }, - enumerable: true, - configurable: true - }) - var FileWriteStream = WriteStream - Object.defineProperty(fs, 'FileWriteStream', { - get: function () { - return FileWriteStream - }, - set: function (val) { - FileWriteStream = val - }, - enumerable: true, - configurable: true - }) - - function ReadStream (path, options) { - if (this instanceof ReadStream) - return fs$ReadStream.apply(this, arguments), this - else - return ReadStream.apply(Object.create(ReadStream.prototype), arguments) - } - - function 
ReadStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - if (that.autoClose) - that.destroy() - - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - that.read() - } - }) - } - - function WriteStream (path, options) { - if (this instanceof WriteStream) - return fs$WriteStream.apply(this, arguments), this - else - return WriteStream.apply(Object.create(WriteStream.prototype), arguments) - } - - function WriteStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - that.destroy() - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - } - }) - } - - function createReadStream (path, options) { - return new fs.ReadStream(path, options) - } - - function createWriteStream (path, options) { - return new fs.WriteStream(path, options) - } - - var fs$open = fs.open - fs.open = open - function open (path, flags, mode, cb) { - if (typeof mode === 'function') - cb = mode, mode = null - - return go$open(path, flags, mode, cb) - - function go$open (path, flags, mode, cb, startTime) { - return fs$open(path, flags, mode, function (err, fd) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - - return fs -} - -function enqueue (elem) { - debug('ENQUEUE', elem[0].name, elem[1]) - fs[gracefulQueue].push(elem) - retry() -} - -// keep track of the timeout between retry() calls -var retryTimer - -// reset the startTime and lastTime to now -// this resets the start of the 60 second overall timeout as well as the -// delay between attempts so that we'll retry these jobs sooner -function resetQueue () { - var now = Date.now() - for (var i = 0; i < fs[gracefulQueue].length; ++i) { - // entries that are only a length of 2 are from an older version, don't - // bother modifying those since they'll be retried anyway. - if (fs[gracefulQueue][i].length > 2) { - fs[gracefulQueue][i][3] = now // startTime - fs[gracefulQueue][i][4] = now // lastTime - } - } - // call retry to make sure we're actively processing the queue - retry() -} - -function retry () { - // clear the timer and remove it to help prevent unintended concurrency - clearTimeout(retryTimer) - retryTimer = undefined - - if (fs[gracefulQueue].length === 0) - return - - var elem = fs[gracefulQueue].shift() - var fn = elem[0] - var args = elem[1] - // these items may be unset if they were added by an older graceful-fs - var err = elem[2] - var startTime = elem[3] - var lastTime = elem[4] - - // if we don't have a startTime we have no way of knowing if we've waited - // long enough, so go ahead and retry this item now - if (startTime === undefined) { - debug('RETRY', fn.name, args) - fn.apply(null, args) - } else if (Date.now() - startTime >= 60000) { - // it's been more than 60 seconds total, bail now - debug('TIMEOUT', fn.name, args) - var cb = args.pop() - if (typeof cb === 'function') - cb.call(null, err) - } else { - // the amount of time between the last attempt and right now - var sinceAttempt = Date.now() - lastTime - // the amount of time between when we first tried, and when we last tried - // rounded up to at least 1 - var sinceStart = Math.max(lastTime - startTime, 1) - // backoff. 
wait longer than the total time we've been retrying, but only - // up to a maximum of 100ms - var desiredDelay = Math.min(sinceStart * 1.2, 100) - // it's been long enough since the last retry, do it again - if (sinceAttempt >= desiredDelay) { - debug('RETRY', fn.name, args) - fn.apply(null, args.concat([startTime])) - } else { - // if we can't do this job yet, push it to the end of the queue - // and let the next iteration check again - fs[gracefulQueue].push(elem) - } - } - - // schedule our next run if one isn't already scheduled - if (retryTimer === undefined) { - retryTimer = setTimeout(retry, 0) - } -} diff --git a/node_modules/graceful-fs/legacy-streams.js b/node_modules/graceful-fs/legacy-streams.js deleted file mode 100644 index d617b50..0000000 --- a/node_modules/graceful-fs/legacy-streams.js +++ /dev/null @@ -1,118 +0,0 @@ -var Stream = require('stream').Stream - -module.exports = legacy - -function legacy (fs) { - return { - ReadStream: ReadStream, - WriteStream: WriteStream - } - - function ReadStream (path, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path, options); - - Stream.call(this); - - var self = this; - - this.path = path; - this.fd = null; - this.readable = true; - this.paused = false; - - this.flags = 'r'; - this.mode = 438; /*=0666*/ - this.bufferSize = 64 * 1024; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.encoding) this.setEncoding(this.encoding); - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.end === undefined) { - this.end = Infinity; - } else if ('number' !== typeof this.end) { - throw TypeError('end must be a Number'); - } - - if (this.start > this.end) { - throw new Error('start must be <= end'); - } - - this.pos = this.start; - } - - if (this.fd !== null) { - process.nextTick(function() { - self._read(); - }); - return; - } - - fs.open(this.path, this.flags, this.mode, function (err, fd) { - if (err) { - self.emit('error', err); - self.readable = false; - return; - } - - self.fd = fd; - self.emit('open', fd); - self._read(); - }) - } - - function WriteStream (path, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path, options); - - Stream.call(this); - - this.path = path; - this.fd = null; - this.writable = true; - - this.flags = 'w'; - this.encoding = 'binary'; - this.mode = 438; /*=0666*/ - this.bytesWritten = 0; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.start < 0) { - throw new Error('start must be >= zero'); - } - - this.pos = this.start; - } - - this.busy = false; - this._queue = []; - - if (this.fd === null) { - this._open = fs.open; - this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); - this.flush(); - } - } -} diff --git a/node_modules/graceful-fs/package.json b/node_modules/graceful-fs/package.json deleted file mode 100644 index 87babf0..0000000 --- a/node_modules/graceful-fs/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "name": "graceful-fs", - "description": "A drop-in replacement 
for fs, making various improvements.", - "version": "4.2.11", - "repository": { - "type": "git", - "url": "https://github.com/isaacs/node-graceful-fs" - }, - "main": "graceful-fs.js", - "directories": { - "test": "test" - }, - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "test": "nyc --silent node test.js | tap -c -", - "posttest": "nyc report" - }, - "keywords": [ - "fs", - "module", - "reading", - "retry", - "retries", - "queue", - "error", - "errors", - "handling", - "EMFILE", - "EAGAIN", - "EINVAL", - "EPERM", - "EACCESS" - ], - "license": "ISC", - "devDependencies": { - "import-fresh": "^2.0.0", - "mkdirp": "^0.5.0", - "rimraf": "^2.2.8", - "tap": "^16.3.4" - }, - "files": [ - "fs.js", - "graceful-fs.js", - "legacy-streams.js", - "polyfills.js", - "clone.js" - ], - "tap": { - "reporter": "classic" - } -} diff --git a/node_modules/graceful-fs/polyfills.js b/node_modules/graceful-fs/polyfills.js deleted file mode 100644 index 453f1a9..0000000 --- a/node_modules/graceful-fs/polyfills.js +++ /dev/null @@ -1,355 +0,0 @@ -var constants = require('constants') - -var origCwd = process.cwd -var cwd = null - -var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform - -process.cwd = function() { - if (!cwd) - cwd = origCwd.call(process) - return cwd -} -try { - process.cwd() -} catch (er) {} - -// This check is needed until node.js 12 is required -if (typeof process.chdir === 'function') { - var chdir = process.chdir - process.chdir = function (d) { - cwd = null - chdir.call(process, d) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) -} - -module.exports = patch - -function patch (fs) { - // (re-)implement some things that are known busted or missing. - - // lchmod, broken prior to 0.6.2 - // back-port the fix here. - if (constants.hasOwnProperty('O_SYMLINK') && - process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs) - } - - // lutimes implementation, or no-op - if (!fs.lutimes) { - patchLutimes(fs) - } - - // https://github.com/isaacs/node-graceful-fs/issues/4 - // Chown should not fail on einval or eperm if non-root. - // It should not fail on enosys ever, as this just indicates - // that a fs doesn't support the intended operation. - - fs.chown = chownFix(fs.chown) - fs.fchown = chownFix(fs.fchown) - fs.lchown = chownFix(fs.lchown) - - fs.chmod = chmodFix(fs.chmod) - fs.fchmod = chmodFix(fs.fchmod) - fs.lchmod = chmodFix(fs.lchmod) - - fs.chownSync = chownFixSync(fs.chownSync) - fs.fchownSync = chownFixSync(fs.fchownSync) - fs.lchownSync = chownFixSync(fs.lchownSync) - - fs.chmodSync = chmodFixSync(fs.chmodSync) - fs.fchmodSync = chmodFixSync(fs.fchmodSync) - fs.lchmodSync = chmodFixSync(fs.lchmodSync) - - fs.stat = statFix(fs.stat) - fs.fstat = statFix(fs.fstat) - fs.lstat = statFix(fs.lstat) - - fs.statSync = statFixSync(fs.statSync) - fs.fstatSync = statFixSync(fs.fstatSync) - fs.lstatSync = statFixSync(fs.lstatSync) - - // if lchmod/lchown do not exist, then make them no-ops - if (fs.chmod && !fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - if (cb) process.nextTick(cb) - } - fs.lchmodSync = function () {} - } - if (fs.chown && !fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - if (cb) process.nextTick(cb) - } - fs.lchownSync = function () {} - } - - // on Windows, A/V software can lock the directory, causing this - // to fail with an EACCES or EPERM if the directory contains newly - // created files. 
Try again on failure, for up to 60 seconds. - - // Set the timeout this long because some Windows Anti-Virus, such as Parity - // bit9, may lock files for up to a minute, causing npm package install - // failures. Also, take care to yield the scheduler. Windows scheduling gives - // CPU to a busy looping process, which can cause the program causing the lock - // contention to be starved of CPU by node, so the contention doesn't resolve. - if (platform === "win32") { - fs.rename = typeof fs.rename !== 'function' ? fs.rename - : (function (fs$rename) { - function rename (from, to, cb) { - var start = Date.now() - var backoff = 0; - fs$rename(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") - && Date.now() - start < 60000) { - setTimeout(function() { - fs.stat(to, function (stater, st) { - if (stater && stater.code === "ENOENT") - fs$rename(from, to, CB); - else - cb(er) - }) - }, backoff) - if (backoff < 100) - backoff += 10; - return; - } - if (cb) cb(er) - }) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) - return rename - })(fs.rename) - } - - // if read() returns EAGAIN, then just try it again. - fs.read = typeof fs.read !== 'function' ? fs.read - : (function (fs$read) { - function read (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - - // This ensures `util.promisify` works as it does for native `fs.read`. - if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) - return read - })(fs.read) - - fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync - : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return fs$readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er - } - } - }})(fs.readSync) - - function patchLchmod (fs) { - fs.lchmod = function (path, mode, callback) { - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - if (callback) callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - if (callback) callback(err || err2) - }) - }) - }) - } - - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. 
- var threw = true - var ret - try { - ret = fs.fchmodSync(fd, mode) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - } - - function patchLutimes (fs) { - if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - if (er) { - if (cb) cb(er) - return - } - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - if (cb) cb(er || er2) - }) - }) - }) - } - - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - var ret - var threw = true - try { - ret = fs.futimesSync(fd, at, mt) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - - } else if (fs.futimes) { - fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } - fs.lutimesSync = function () {} - } - } - - function chmodFix (orig) { - if (!orig) return orig - return function (target, mode, cb) { - return orig.call(fs, target, mode, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chmodFixSync (orig) { - if (!orig) return orig - return function (target, mode) { - try { - return orig.call(fs, target, mode) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - - function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - function statFix (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - function callback (er, stats) { - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - if (cb) cb.apply(this, arguments) - } - return options ? orig.call(fs, target, options, callback) - : orig.call(fs, target, callback) - } - } - - function statFixSync (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options) { - var stats = options ? orig.call(fs, target, options) - : orig.call(fs, target) - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - return stats; - } - } - - // ENOSYS means that the fs doesn't support the op. Just ignore - // that, because it doesn't matter. - // - // if there's no getuid, or if getuid() is something other - // than 0, and the error is EINVAL or EPERM, then just ignore - // it. - // - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // - // When running as root, or if other types of errors are - // encountered, then it's strict. 
- function chownErOk (er) { - if (!er) - return true - - if (er.code === "ENOSYS") - return true - - var nonroot = !process.getuid || process.getuid() !== 0 - if (nonroot) { - if (er.code === "EINVAL" || er.code === "EPERM") - return true - } - - return false - } -} diff --git a/node_modules/ignore/LICENSE-MIT b/node_modules/ignore/LICENSE-MIT deleted file mode 100644 index 825533e..0000000 --- a/node_modules/ignore/LICENSE-MIT +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2013 Kael Zhang , contributors -http://kael.me/ - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/ignore/README.md b/node_modules/ignore/README.md deleted file mode 100644 index 50d8882..0000000 --- a/node_modules/ignore/README.md +++ /dev/null @@ -1,412 +0,0 @@ - - - - - - - - - - - - - -
LinuxOS XWindowsCoverageDownloads
- - Build Status - - - Windows Build Status - - - Coverage Status - - - npm module downloads per month -
- -# ignore - -`ignore` is a manager, filter and parser which implemented in pure JavaScript according to the [.gitignore spec 2.22.1](http://git-scm.com/docs/gitignore). - -`ignore` is used by eslint, gitbook and [many others](https://www.npmjs.com/browse/depended/ignore). - -Pay **ATTENTION** that [`minimatch`](https://www.npmjs.org/package/minimatch) (which used by `fstream-ignore`) does not follow the gitignore spec. - -To filter filenames according to a .gitignore file, I recommend this npm package, `ignore`. - -To parse an `.npmignore` file, you should use `minimatch`, because an `.npmignore` file is parsed by npm using `minimatch` and it does not work in the .gitignore way. - -### Tested on - -`ignore` is fully tested, and has more than **five hundreds** of unit tests. - -- Linux + Node: `0.8` - `7.x` -- Windows + Node: `0.10` - `7.x`, node < `0.10` is not tested due to the lack of support of appveyor. - -Actually, `ignore` does not rely on any versions of node specially. - -Since `4.0.0`, ignore will no longer support `node < 6` by default, to use in node < 6, `require('ignore/legacy')`. For details, see [CHANGELOG](https://github.com/kaelzhang/node-ignore/blob/master/CHANGELOG.md). - -## Table Of Main Contents - -- [Usage](#usage) -- [`Pathname` Conventions](#pathname-conventions) -- See Also: - - [`glob-gitignore`](https://www.npmjs.com/package/glob-gitignore) matches files using patterns and filters them according to gitignore rules. -- [Upgrade Guide](#upgrade-guide) - -## Install - -```sh -npm i ignore -``` - -## Usage - -```js -import ignore from 'ignore' -const ig = ignore().add(['.abc/*', '!.abc/d/']) -``` - -### Filter the given paths - -```js -const paths = [ - '.abc/a.js', // filtered out - '.abc/d/e.js' // included -] - -ig.filter(paths) // ['.abc/d/e.js'] -ig.ignores('.abc/a.js') // true -``` - -### As the filter function - -```js -paths.filter(ig.createFilter()); // ['.abc/d/e.js'] -``` - -### Win32 paths will be handled - -```js -ig.filter(['.abc\\a.js', '.abc\\d\\e.js']) -// if the code above runs on windows, the result will be -// ['.abc\\d\\e.js'] -``` - -## Why another ignore? - -- `ignore` is a standalone module, and is much simpler so that it could easy work with other programs, unlike [isaacs](https://npmjs.org/~isaacs)'s [fstream-ignore](https://npmjs.org/package/fstream-ignore) which must work with the modules of the fstream family. - -- `ignore` only contains utility methods to filter paths according to the specified ignore rules, so - - `ignore` never try to find out ignore rules by traversing directories or fetching from git configurations. - - `ignore` don't cares about sub-modules of git projects. - -- Exactly according to [gitignore man page](http://git-scm.com/docs/gitignore), fixes some known matching issues of fstream-ignore, such as: - - '`/*.js`' should only match '`a.js`', but not '`abc/a.js`'. - - '`**/foo`' should match '`foo`' anywhere. - - Prevent re-including a file if a parent directory of that file is excluded. - - Handle trailing whitespaces: - - `'a '`(one space) should not match `'a '`(two spaces). - - `'a \ '` matches `'a '` - - All test cases are verified with the result of `git check-ignore`. - -# Methods - -## .add(pattern: string | Ignore): this -## .add(patterns: Array): this - -- **pattern** `String | Ignore` An ignore pattern string, or the `Ignore` instance -- **patterns** `Array` Array of ignore patterns. - -Adds a rule or several rules to the current manager. 
- -Returns `this` - -Notice that a line starting with `'#'`(hash) is treated as a comment. Put a backslash (`'\'`) in front of the first hash for patterns that begin with a hash, if you want to ignore a file with a hash at the beginning of the filename. - -```js -ignore().add('#abc').ignores('#abc') // false -ignore().add('\\#abc').ignores('#abc') // true -``` - -`pattern` could either be a line of ignore pattern or a string of multiple ignore patterns, which means we could just `ignore().add()` the content of a ignore file: - -```js -ignore() -.add(fs.readFileSync(filenameOfGitignore).toString()) -.filter(filenames) -``` - -`pattern` could also be an `ignore` instance, so that we could easily inherit the rules of another `Ignore` instance. - -## .addIgnoreFile(path) - -REMOVED in `3.x` for now. - -To upgrade `ignore@2.x` up to `3.x`, use - -```js -import fs from 'fs' - -if (fs.existsSync(filename)) { - ignore().add(fs.readFileSync(filename).toString()) -} -``` - -instead. - -## .filter(paths: Array<Pathname>): Array<Pathname> - -```ts -type Pathname = string -``` - -Filters the given array of pathnames, and returns the filtered array. - -- **paths** `Array.` The array of `pathname`s to be filtered. - -### `Pathname` Conventions: - -#### 1. `Pathname` should be a `path.relative()`d pathname - -`Pathname` should be a string that have been `path.join()`ed, or the return value of `path.relative()` to the current directory, - -```js -// WRONG, an error will be thrown -ig.ignores('./abc') - -// WRONG, for it will never happen, and an error will be thrown -// If the gitignore rule locates at the root directory, -// `'/abc'` should be changed to `'abc'`. -// ``` -// path.relative('/', '/abc') -> 'abc' -// ``` -ig.ignores('/abc') - -// WRONG, that it is an absolute path on Windows, an error will be thrown -ig.ignores('C:\\abc') - -// Right -ig.ignores('abc') - -// Right -ig.ignores(path.join('./abc')) // path.join('./abc') -> 'abc' -``` - -In other words, each `Pathname` here should be a relative path to the directory of the gitignore rules. - -Suppose the dir structure is: - -``` -/path/to/your/repo - |-- a - | |-- a.js - | - |-- .b - | - |-- .c - |-- .DS_store -``` - -Then the `paths` might be like this: - -```js -[ - 'a/a.js' - '.b', - '.c/.DS_store' -] -``` - -#### 2. filenames and dirnames - -`node-ignore` does NO `fs.stat` during path matching, so for the example below: - -```js -// First, we add a ignore pattern to ignore a directory -ig.add('config/') - -// `ig` does NOT know if 'config', in the real world, -// is a normal file, directory or something. - -ig.ignores('config') -// `ig` treats `config` as a file, so it returns `false` - -ig.ignores('config/') -// returns `true` -``` - -Specially for people who develop some library based on `node-ignore`, it is important to understand that. - -Usually, you could use [`glob`](http://npmjs.org/package/glob) with `option.mark = true` to fetch the structure of the current directory: - -```js -import glob from 'glob' - -glob('**', { - // Adds a / character to directory matches. - mark: true -}, (err, files) => { - if (err) { - return console.error(err) - } - - let filtered = ignore().add(patterns).filter(files) - console.log(filtered) -}) -``` - -## .ignores(pathname: Pathname): boolean - -> new in 3.2.0 - -Returns `Boolean` whether `pathname` should be ignored. - -```js -ig.ignores('.abc/a.js') // true -``` - -## .createFilter() - -Creates a filter function which could filter an array of paths with `Array.prototype.filter`. 
- -Returns `function(path)` the filter function. - -## .test(pathname: Pathname) since 5.0.0 - -Returns `TestResult` - -```ts -interface TestResult { - ignored: boolean - // true if the `pathname` is finally unignored by some negative pattern - unignored: boolean -} -``` - -- `{ignored: true, unignored: false}`: the `pathname` is ignored -- `{ignored: false, unignored: true}`: the `pathname` is unignored -- `{ignored: false, unignored: false}`: the `pathname` is never matched by any ignore rules. - -## static `ignore.isPathValid(pathname): boolean` since 5.0.0 - -Check whether the `pathname` is an valid `path.relative()`d path according to the [convention](#1-pathname-should-be-a-pathrelatived-pathname). - -This method is **NOT** used to check if an ignore pattern is valid. - -```js -ignore.isPathValid('./foo') // false -``` - -## ignore(options) - -### `options.ignorecase` since 4.0.0 - -Similar as the `core.ignorecase` option of [git-config](https://git-scm.com/docs/git-config), `node-ignore` will be case insensitive if `options.ignorecase` is set to `true` (the default value), otherwise case sensitive. - -```js -const ig = ignore({ - ignorecase: false -}) - -ig.add('*.png') - -ig.ignores('*.PNG') // false -``` - -### `options.ignoreCase?: boolean` since 5.2.0 - -Which is alternative to `options.ignoreCase` - -### `options.allowRelativePaths?: boolean` since 5.2.0 - -This option brings backward compatibility with projects which based on `ignore@4.x`. If `options.allowRelativePaths` is `true`, `ignore` will not check whether the given path to be tested is [`path.relative()`d](#pathname-conventions). - -However, passing a relative path, such as `'./foo'` or `'../foo'`, to test if it is ignored or not is not a good practise, which might lead to unexpected behavior - -```js -ignore({ - allowRelativePaths: true -}).ignores('../foo/bar.js') // And it will not throw -``` - -**** - -# Upgrade Guide - -## Upgrade 4.x -> 5.x - -Since `5.0.0`, if an invalid `Pathname` passed into `ig.ignores()`, an error will be thrown, unless `options.allowRelative = true` is passed to the `Ignore` factory. - -While `ignore < 5.0.0` did not make sure what the return value was, as well as - -```ts -.ignores(pathname: Pathname): boolean - -.filter(pathnames: Array): Array - -.createFilter(): (pathname: Pathname) => boolean - -.test(pathname: Pathname): {ignored: boolean, unignored: boolean} -``` - -See the convention [here](#1-pathname-should-be-a-pathrelatived-pathname) for details. - -If there are invalid pathnames, the conversion and filtration should be done by users. - -```js -import {isPathValid} from 'ignore' // introduced in 5.0.0 - -const paths = [ - // invalid - ////////////////// - '', - false, - '../foo', - '.', - ////////////////// - - // valid - 'foo' -] -.filter(isValidPath) - -ig.filter(paths) -``` - -## Upgrade 3.x -> 4.x - -Since `4.0.0`, `ignore` will no longer support node < 6, to use `ignore` in node < 6: - -```js -var ignore = require('ignore/legacy') -``` - -## Upgrade 2.x -> 3.x - -- All `options` of 2.x are unnecessary and removed, so just remove them. -- `ignore()` instance is no longer an [`EventEmitter`](nodejs.org/api/events.html), and all events are unnecessary and removed. -- `.addIgnoreFile()` is removed, see the [.addIgnoreFile](#addignorefilepath) section for details. 
- -**** - -# Collaborators - -- [@whitecolor](https://github.com/whitecolor) *Alex* -- [@SamyPesse](https://github.com/SamyPesse) *Samy Pessé* -- [@azproduction](https://github.com/azproduction) *Mikhail Davydov* -- [@TrySound](https://github.com/TrySound) *Bogdan Chadkin* -- [@JanMattner](https://github.com/JanMattner) *Jan Mattner* -- [@ntwb](https://github.com/ntwb) *Stephen Edgar* -- [@kasperisager](https://github.com/kasperisager) *Kasper Isager* -- [@sandersn](https://github.com/sandersn) *Nathan Shively-Sanders* diff --git a/node_modules/ignore/index.d.ts b/node_modules/ignore/index.d.ts deleted file mode 100644 index 970631e..0000000 --- a/node_modules/ignore/index.d.ts +++ /dev/null @@ -1,61 +0,0 @@ -type Pathname = string - -interface TestResult { - ignored: boolean - unignored: boolean -} - -export interface Ignore { - /** - * Adds one or several rules to the current manager. - * @param {string[]} patterns - * @returns IgnoreBase - */ - add(patterns: string | Ignore | readonly (string | Ignore)[]): this - - /** - * Filters the given array of pathnames, and returns the filtered array. - * NOTICE that each path here should be a relative path to the root of your repository. - * @param paths the array of paths to be filtered. - * @returns The filtered array of paths - */ - filter(pathnames: readonly Pathname[]): Pathname[] - - /** - * Creates a filter function which could filter - * an array of paths with Array.prototype.filter. - */ - createFilter(): (pathname: Pathname) => boolean - - /** - * Returns Boolean whether pathname should be ignored. - * @param {string} pathname a path to check - * @returns boolean - */ - ignores(pathname: Pathname): boolean - - /** - * Returns whether pathname should be ignored or unignored - * @param {string} pathname a path to check - * @returns TestResult - */ - test(pathname: Pathname): TestResult -} - -export interface Options { - ignorecase?: boolean - // For compatibility - ignoreCase?: boolean - allowRelativePaths?: boolean -} - -/** - * Creates new ignore manager. - */ -declare function ignore(options?: Options): Ignore - -declare namespace ignore { - export function isPathValid (pathname: string): boolean -} - -export default ignore diff --git a/node_modules/ignore/index.js b/node_modules/ignore/index.js deleted file mode 100644 index 9f0dbfe..0000000 --- a/node_modules/ignore/index.js +++ /dev/null @@ -1,636 +0,0 @@ -// A simple implementation of make-array -function makeArray (subject) { - return Array.isArray(subject) - ? subject - : [subject] -} - -const EMPTY = '' -const SPACE = ' ' -const ESCAPE = '\\' -const REGEX_TEST_BLANK_LINE = /^\s+$/ -const REGEX_INVALID_TRAILING_BACKSLASH = /(?:[^\\]|^)\\$/ -const REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION = /^\\!/ -const REGEX_REPLACE_LEADING_EXCAPED_HASH = /^\\#/ -const REGEX_SPLITALL_CRLF = /\r?\n/g -// /foo, -// ./foo, -// ../foo, -// . -// .. 
-const REGEX_TEST_INVALID_PATH = /^\.*\/|^\.+$/ - -const SLASH = '/' - -// Do not use ternary expression here, since "istanbul ignore next" is buggy -let TMP_KEY_IGNORE = 'node-ignore' -/* istanbul ignore else */ -if (typeof Symbol !== 'undefined') { - TMP_KEY_IGNORE = Symbol.for('node-ignore') -} -const KEY_IGNORE = TMP_KEY_IGNORE - -const define = (object, key, value) => - Object.defineProperty(object, key, {value}) - -const REGEX_REGEXP_RANGE = /([0-z])-([0-z])/g - -const RETURN_FALSE = () => false - -// Sanitize the range of a regular expression -// The cases are complicated, see test cases for details -const sanitizeRange = range => range.replace( - REGEX_REGEXP_RANGE, - (match, from, to) => from.charCodeAt(0) <= to.charCodeAt(0) - ? match - // Invalid range (out of order) which is ok for gitignore rules but - // fatal for JavaScript regular expression, so eliminate it. - : EMPTY -) - -// See fixtures #59 -const cleanRangeBackSlash = slashes => { - const {length} = slashes - return slashes.slice(0, length - length % 2) -} - -// > If the pattern ends with a slash, -// > it is removed for the purpose of the following description, -// > but it would only find a match with a directory. -// > In other words, foo/ will match a directory foo and paths underneath it, -// > but will not match a regular file or a symbolic link foo -// > (this is consistent with the way how pathspec works in general in Git). -// '`foo/`' will not match regular file '`foo`' or symbolic link '`foo`' -// -> ignore-rules will not deal with it, because it costs extra `fs.stat` call -// you could use option `mark: true` with `glob` - -// '`foo/`' should not continue with the '`..`' -const REPLACERS = [ - - [ - // remove BOM - // TODO: - // Other similar zero-width characters? - /^\uFEFF/, - () => EMPTY - ], - - // > Trailing spaces are ignored unless they are quoted with backslash ("\") - [ - // (a\ ) -> (a ) - // (a ) -> (a) - // (a ) -> (a) - // (a \ ) -> (a ) - /((?:\\\\)*?)(\\?\s+)$/, - (_, m1, m2) => m1 + ( - m2.indexOf('\\') === 0 - ? SPACE - : EMPTY - ) - ], - - // replace (\ ) with ' ' - // (\ ) -> ' ' - // (\\ ) -> '\\ ' - // (\\\ ) -> '\\ ' - [ - /(\\+?)\s/g, - (_, m1) => { - const {length} = m1 - return m1.slice(0, length - length % 2) + SPACE - } - ], - - // Escape metacharacters - // which is written down by users but means special for regular expressions. - - // > There are 12 characters with special meanings: - // > - the backslash \, - // > - the caret ^, - // > - the dollar sign $, - // > - the period or dot ., - // > - the vertical bar or pipe symbol |, - // > - the question mark ?, - // > - the asterisk or star *, - // > - the plus sign +, - // > - the opening parenthesis (, - // > - the closing parenthesis ), - // > - and the opening square bracket [, - // > - the opening curly brace {, - // > These special characters are often called "metacharacters". - [ - /[\\$.|*+(){^]/g, - match => `\\${match}` - ], - - [ - // > a question mark (?) matches a single character - /(?!\\)\?/g, - () => '[^/]' - ], - - // leading slash - [ - - // > A leading slash matches the beginning of the pathname. - // > For example, "/*.c" matches "cat-file.c" but not "mozilla-sha1/sha1.c". - // A leading slash matches the beginning of the pathname - /^\//, - () => '^' - ], - - // replace special metacharacter slash after the leading slash - [ - /\//g, - () => '\\/' - ], - - [ - // > A leading "**" followed by a slash means match in all directories. 
- // > For example, "**/foo" matches file or directory "foo" anywhere, - // > the same as pattern "foo". - // > "**/foo/bar" matches file or directory "bar" anywhere that is directly - // > under directory "foo". - // Notice that the '*'s have been replaced as '\\*' - /^\^*\\\*\\\*\\\//, - - // '**/foo' <-> 'foo' - () => '^(?:.*\\/)?' - ], - - // starting - [ - // there will be no leading '/' - // (which has been replaced by section "leading slash") - // If starts with '**', adding a '^' to the regular expression also works - /^(?=[^^])/, - function startingReplacer () { - // If has a slash `/` at the beginning or middle - return !/\/(?!$)/.test(this) - // > Prior to 2.22.1 - // > If the pattern does not contain a slash /, - // > Git treats it as a shell glob pattern - // Actually, if there is only a trailing slash, - // git also treats it as a shell glob pattern - - // After 2.22.1 (compatible but clearer) - // > If there is a separator at the beginning or middle (or both) - // > of the pattern, then the pattern is relative to the directory - // > level of the particular .gitignore file itself. - // > Otherwise the pattern may also match at any level below - // > the .gitignore level. - ? '(?:^|\\/)' - - // > Otherwise, Git treats the pattern as a shell glob suitable for - // > consumption by fnmatch(3) - : '^' - } - ], - - // two globstars - [ - // Use lookahead assertions so that we could match more than one `'/**'` - /\\\/\\\*\\\*(?=\\\/|$)/g, - - // Zero, one or several directories - // should not use '*', or it will be replaced by the next replacer - - // Check if it is not the last `'/**'` - (_, index, str) => index + 6 < str.length - - // case: /**/ - // > A slash followed by two consecutive asterisks then a slash matches - // > zero or more directories. - // > For example, "a/**/b" matches "a/b", "a/x/b", "a/x/y/b" and so on. - // '/**/' - ? '(?:\\/[^\\/]+)*' - - // case: /** - // > A trailing `"/**"` matches everything inside. - - // #21: everything inside but it should not include the current folder - : '\\/.+' - ], - - // normal intermediate wildcards - [ - // Never replace escaped '*' - // ignore rule '\*' will match the path '*' - - // 'abc.*/' -> go - // 'abc.*' -> skip this rule, - // coz trailing single wildcard will be handed by [trailing wildcard] - /(^|[^\\]+)(\\\*)+(?=.+)/g, - - // '*.js' matches '.js' - // '*.js' doesn't match 'abc' - (_, p1, p2) => { - // 1. - // > An asterisk "*" matches anything except a slash. - // 2. - // > Other consecutive asterisks are considered regular asterisks - // > and will match according to the previous rules. - const unescaped = p2.replace(/\\\*/g, '[^\\/]*') - return p1 + unescaped - } - ], - - [ - // unescape, revert step 3 except for back slash - // For example, if a user escape a '\\*', - // after step 3, the result will be '\\\\\\*' - /\\\\\\(?=[$.|*+(){^])/g, - () => ESCAPE - ], - - [ - // '\\\\' -> '\\' - /\\\\/g, - () => ESCAPE - ], - - [ - // > The range notation, e.g. [a-zA-Z], - // > can be used to match one of the characters in a range. - - // `\` is escaped by step 3 - /(\\)?\[([^\]/]*?)(\\*)($|\])/g, - (match, leadEscape, range, endEscape, close) => leadEscape === ESCAPE - // '\\[bar]' -> '\\\\[bar\\]' - ? `\\[${range}${cleanRangeBackSlash(endEscape)}${close}` - : close === ']' - ? endEscape.length % 2 === 0 - // A normal case, and it is a range notation - // '[bar]' - // '[bar\\\\]' - ? 
`[${sanitizeRange(range)}${endEscape}]` - // Invalid range notaton - // '[bar\\]' -> '[bar\\\\]' - : '[]' - : '[]' - ], - - // ending - [ - // 'js' will not match 'js.' - // 'ab' will not match 'abc' - /(?:[^*])$/, - - // WTF! - // https://git-scm.com/docs/gitignore - // changes in [2.22.1](https://git-scm.com/docs/gitignore/2.22.1) - // which re-fixes #24, #38 - - // > If there is a separator at the end of the pattern then the pattern - // > will only match directories, otherwise the pattern can match both - // > files and directories. - - // 'js*' will not match 'a.js' - // 'js/' will not match 'a.js' - // 'js' will match 'a.js' and 'a.js/' - match => /\/$/.test(match) - // foo/ will not match 'foo' - ? `${match}$` - // foo matches 'foo' and 'foo/' - : `${match}(?=$|\\/$)` - ], - - // trailing wildcard - [ - /(\^|\\\/)?\\\*$/, - (_, p1) => { - const prefix = p1 - // '\^': - // '/*' does not match EMPTY - // '/*' does not match everything - - // '\\\/': - // 'abc/*' does not match 'abc/' - ? `${p1}[^/]+` - - // 'a*' matches 'a' - // 'a*' matches 'aa' - : '[^/]*' - - return `${prefix}(?=$|\\/$)` - } - ], -] - -// A simple cache, because an ignore rule only has only one certain meaning -const regexCache = Object.create(null) - -// @param {pattern} -const makeRegex = (pattern, ignoreCase) => { - let source = regexCache[pattern] - - if (!source) { - source = REPLACERS.reduce( - (prev, [matcher, replacer]) => - prev.replace(matcher, replacer.bind(pattern)), - pattern - ) - regexCache[pattern] = source - } - - return ignoreCase - ? new RegExp(source, 'i') - : new RegExp(source) -} - -const isString = subject => typeof subject === 'string' - -// > A blank line matches no files, so it can serve as a separator for readability. -const checkPattern = pattern => pattern - && isString(pattern) - && !REGEX_TEST_BLANK_LINE.test(pattern) - && !REGEX_INVALID_TRAILING_BACKSLASH.test(pattern) - - // > A line starting with # serves as a comment. - && pattern.indexOf('#') !== 0 - -const splitPattern = pattern => pattern.split(REGEX_SPLITALL_CRLF) - -class IgnoreRule { - constructor ( - origin, - pattern, - negative, - regex - ) { - this.origin = origin - this.pattern = pattern - this.negative = negative - this.regex = regex - } -} - -const createRule = (pattern, ignoreCase) => { - const origin = pattern - let negative = false - - // > An optional prefix "!" which negates the pattern; - if (pattern.indexOf('!') === 0) { - negative = true - pattern = pattern.substr(1) - } - - pattern = pattern - // > Put a backslash ("\") in front of the first "!" for patterns that - // > begin with a literal "!", for example, `"\!important!.txt"`. - .replace(REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION, '!') - // > Put a backslash ("\") in front of the first hash for patterns that - // > begin with a hash. 
- .replace(REGEX_REPLACE_LEADING_EXCAPED_HASH, '#') - - const regex = makeRegex(pattern, ignoreCase) - - return new IgnoreRule( - origin, - pattern, - negative, - regex - ) -} - -const throwError = (message, Ctor) => { - throw new Ctor(message) -} - -const checkPath = (path, originalPath, doThrow) => { - if (!isString(path)) { - return doThrow( - `path must be a string, but got \`${originalPath}\``, - TypeError - ) - } - - // We don't know if we should ignore EMPTY, so throw - if (!path) { - return doThrow(`path must not be empty`, TypeError) - } - - // Check if it is a relative path - if (checkPath.isNotRelative(path)) { - const r = '`path.relative()`d' - return doThrow( - `path should be a ${r} string, but got "${originalPath}"`, - RangeError - ) - } - - return true -} - -const isNotRelative = path => REGEX_TEST_INVALID_PATH.test(path) - -checkPath.isNotRelative = isNotRelative -checkPath.convert = p => p - -class Ignore { - constructor ({ - ignorecase = true, - ignoreCase = ignorecase, - allowRelativePaths = false - } = {}) { - define(this, KEY_IGNORE, true) - - this._rules = [] - this._ignoreCase = ignoreCase - this._allowRelativePaths = allowRelativePaths - this._initCache() - } - - _initCache () { - this._ignoreCache = Object.create(null) - this._testCache = Object.create(null) - } - - _addPattern (pattern) { - // #32 - if (pattern && pattern[KEY_IGNORE]) { - this._rules = this._rules.concat(pattern._rules) - this._added = true - return - } - - if (checkPattern(pattern)) { - const rule = createRule(pattern, this._ignoreCase) - this._added = true - this._rules.push(rule) - } - } - - // @param {Array | string | Ignore} pattern - add (pattern) { - this._added = false - - makeArray( - isString(pattern) - ? splitPattern(pattern) - : pattern - ).forEach(this._addPattern, this) - - // Some rules have just added to the ignore, - // making the behavior changed. - if (this._added) { - this._initCache() - } - - return this - } - - // legacy - addPattern (pattern) { - return this.add(pattern) - } - - // | ignored : unignored - // negative | 0:0 | 0:1 | 1:0 | 1:1 - // -------- | ------- | ------- | ------- | -------- - // 0 | TEST | TEST | SKIP | X - // 1 | TESTIF | SKIP | TEST | X - - // - SKIP: always skip - // - TEST: always test - // - TESTIF: only test if checkUnignored - // - X: that never happen - - // @param {boolean} whether should check if the path is unignored, - // setting `checkUnignored` to `false` could reduce additional - // path matching. - - // @returns {TestResult} true if a file is ignored - _testOne (path, checkUnignored) { - let ignored = false - let unignored = false - - this._rules.forEach(rule => { - const {negative} = rule - if ( - unignored === negative && ignored !== unignored - || negative && !ignored && !unignored && !checkUnignored - ) { - return - } - - const matched = rule.regex.test(path) - - if (matched) { - ignored = !negative - unignored = negative - } - }) - - return { - ignored, - unignored - } - } - - // @returns {TestResult} - _test (originalPath, cache, checkUnignored, slices) { - const path = originalPath - // Supports nullable path - && checkPath.convert(originalPath) - - checkPath( - path, - originalPath, - this._allowRelativePaths - ? 
RETURN_FALSE - : throwError - ) - - return this._t(path, cache, checkUnignored, slices) - } - - _t (path, cache, checkUnignored, slices) { - if (path in cache) { - return cache[path] - } - - if (!slices) { - // path/to/a.js - // ['path', 'to', 'a.js'] - slices = path.split(SLASH) - } - - slices.pop() - - // If the path has no parent directory, just test it - if (!slices.length) { - return cache[path] = this._testOne(path, checkUnignored) - } - - const parent = this._t( - slices.join(SLASH) + SLASH, - cache, - checkUnignored, - slices - ) - - // If the path contains a parent directory, check the parent first - return cache[path] = parent.ignored - // > It is not possible to re-include a file if a parent directory of - // > that file is excluded. - ? parent - : this._testOne(path, checkUnignored) - } - - ignores (path) { - return this._test(path, this._ignoreCache, false).ignored - } - - createFilter () { - return path => !this.ignores(path) - } - - filter (paths) { - return makeArray(paths).filter(this.createFilter()) - } - - // @returns {TestResult} - test (path) { - return this._test(path, this._testCache, true) - } -} - -const factory = options => new Ignore(options) - -const isPathValid = path => - checkPath(path && checkPath.convert(path), path, RETURN_FALSE) - -factory.isPathValid = isPathValid - -// Fixes typescript -factory.default = factory - -module.exports = factory - -// Windows -// -------------------------------------------------------------- -/* istanbul ignore if */ -if ( - // Detect `process` so that it can run in browsers. - typeof process !== 'undefined' - && ( - process.env && process.env.IGNORE_TEST_WIN32 - || process.platform === 'win32' - ) -) { - /* eslint no-control-regex: "off" */ - const makePosix = str => /^\\\\\?\\/.test(str) - || /["<>|\u0000-\u001F]+/u.test(str) - ? str - : str.replace(/\\/g, '/') - - checkPath.convert = makePosix - - // 'C:\\foo' <- 'C:\\foo' has been converted to 'C:/' - // 'd:\\foo' - const REGIX_IS_WINDOWS_PATH_ABSOLUTE = /^[a-z]:\//i - checkPath.isNotRelative = path => - REGIX_IS_WINDOWS_PATH_ABSOLUTE.test(path) - || isNotRelative(path) -} diff --git a/node_modules/ignore/legacy.js b/node_modules/ignore/legacy.js deleted file mode 100644 index de3f66c..0000000 --- a/node_modules/ignore/legacy.js +++ /dev/null @@ -1,559 +0,0 @@ -"use strict"; - -function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } -function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } -function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } -function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return _typeof(key) === "symbol" ? 
key : String(key); } -function _toPrimitive(input, hint) { if (_typeof(input) !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (_typeof(res) !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } -function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); } -function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } -function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } -function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; } -function _iterableToArrayLimit(arr, i) { var _i = null == arr ? null : "undefined" != typeof Symbol && arr[Symbol.iterator] || arr["@@iterator"]; if (null != _i) { var _s, _e, _x, _r, _arr = [], _n = !0, _d = !1; try { if (_x = (_i = _i.call(arr)).next, 0 === i) { if (Object(_i) !== _i) return; _n = !1; } else for (; !(_n = (_s = _x.call(_i)).done) && (_arr.push(_s.value), _arr.length !== i); _n = !0); } catch (err) { _d = !0, _e = err; } finally { try { if (!_n && null != _i["return"] && (_r = _i["return"](), Object(_r) !== _r)) return; } finally { if (_d) throw _e; } } return _arr; } } -function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } -// A simple implementation of make-array -function makeArray(subject) { - return Array.isArray(subject) ? subject : [subject]; -} -var EMPTY = ''; -var SPACE = ' '; -var ESCAPE = '\\'; -var REGEX_TEST_BLANK_LINE = /^\s+$/; -var REGEX_INVALID_TRAILING_BACKSLASH = /(?:[^\\]|^)\\$/; -var REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION = /^\\!/; -var REGEX_REPLACE_LEADING_EXCAPED_HASH = /^\\#/; -var REGEX_SPLITALL_CRLF = /\r?\n/g; -// /foo, -// ./foo, -// ../foo, -// . -// .. -var REGEX_TEST_INVALID_PATH = /^\.*\/|^\.+$/; -var SLASH = '/'; - -// Do not use ternary expression here, since "istanbul ignore next" is buggy -var TMP_KEY_IGNORE = 'node-ignore'; -/* istanbul ignore else */ -if (typeof Symbol !== 'undefined') { - TMP_KEY_IGNORE = Symbol["for"]('node-ignore'); -} -var KEY_IGNORE = TMP_KEY_IGNORE; -var define = function define(object, key, value) { - return Object.defineProperty(object, key, { - value: value - }); -}; -var REGEX_REGEXP_RANGE = /([0-z])-([0-z])/g; -var RETURN_FALSE = function RETURN_FALSE() { - return false; -}; - -// Sanitize the range of a regular expression -// The cases are complicated, see test cases for details -var sanitizeRange = function sanitizeRange(range) { - return range.replace(REGEX_REGEXP_RANGE, function (match, from, to) { - return from.charCodeAt(0) <= to.charCodeAt(0) ? 
match - // Invalid range (out of order) which is ok for gitignore rules but - // fatal for JavaScript regular expression, so eliminate it. - : EMPTY; - }); -}; - -// See fixtures #59 -var cleanRangeBackSlash = function cleanRangeBackSlash(slashes) { - var length = slashes.length; - return slashes.slice(0, length - length % 2); -}; - -// > If the pattern ends with a slash, -// > it is removed for the purpose of the following description, -// > but it would only find a match with a directory. -// > In other words, foo/ will match a directory foo and paths underneath it, -// > but will not match a regular file or a symbolic link foo -// > (this is consistent with the way how pathspec works in general in Git). -// '`foo/`' will not match regular file '`foo`' or symbolic link '`foo`' -// -> ignore-rules will not deal with it, because it costs extra `fs.stat` call -// you could use option `mark: true` with `glob` - -// '`foo/`' should not continue with the '`..`' -var REPLACERS = [[ -// remove BOM -// TODO: -// Other similar zero-width characters? -/^\uFEFF/, function () { - return EMPTY; -}], -// > Trailing spaces are ignored unless they are quoted with backslash ("\") -[ -// (a\ ) -> (a ) -// (a ) -> (a) -// (a ) -> (a) -// (a \ ) -> (a ) -/((?:\\\\)*?)(\\?\s+)$/, function (_, m1, m2) { - return m1 + (m2.indexOf('\\') === 0 ? SPACE : EMPTY); -}], -// replace (\ ) with ' ' -// (\ ) -> ' ' -// (\\ ) -> '\\ ' -// (\\\ ) -> '\\ ' -[/(\\+?)\s/g, function (_, m1) { - var length = m1.length; - return m1.slice(0, length - length % 2) + SPACE; -}], -// Escape metacharacters -// which is written down by users but means special for regular expressions. - -// > There are 12 characters with special meanings: -// > - the backslash \, -// > - the caret ^, -// > - the dollar sign $, -// > - the period or dot ., -// > - the vertical bar or pipe symbol |, -// > - the question mark ?, -// > - the asterisk or star *, -// > - the plus sign +, -// > - the opening parenthesis (, -// > - the closing parenthesis ), -// > - and the opening square bracket [, -// > - the opening curly brace {, -// > These special characters are often called "metacharacters". -[/[\\$.|*+(){^]/g, function (match) { - return "\\".concat(match); -}], [ -// > a question mark (?) matches a single character -/(?!\\)\?/g, function () { - return '[^/]'; -}], -// leading slash -[ -// > A leading slash matches the beginning of the pathname. -// > For example, "/*.c" matches "cat-file.c" but not "mozilla-sha1/sha1.c". -// A leading slash matches the beginning of the pathname -/^\//, function () { - return '^'; -}], -// replace special metacharacter slash after the leading slash -[/\//g, function () { - return '\\/'; -}], [ -// > A leading "**" followed by a slash means match in all directories. -// > For example, "**/foo" matches file or directory "foo" anywhere, -// > the same as pattern "foo". -// > "**/foo/bar" matches file or directory "bar" anywhere that is directly -// > under directory "foo". 
-// Notice that the '*'s have been replaced as '\\*' -/^\^*\\\*\\\*\\\//, -// '**/foo' <-> 'foo' -function () { - return '^(?:.*\\/)?'; -}], -// starting -[ -// there will be no leading '/' -// (which has been replaced by section "leading slash") -// If starts with '**', adding a '^' to the regular expression also works -/^(?=[^^])/, function startingReplacer() { - // If has a slash `/` at the beginning or middle - return !/\/(?!$)/.test(this) - // > Prior to 2.22.1 - // > If the pattern does not contain a slash /, - // > Git treats it as a shell glob pattern - // Actually, if there is only a trailing slash, - // git also treats it as a shell glob pattern - - // After 2.22.1 (compatible but clearer) - // > If there is a separator at the beginning or middle (or both) - // > of the pattern, then the pattern is relative to the directory - // > level of the particular .gitignore file itself. - // > Otherwise the pattern may also match at any level below - // > the .gitignore level. - ? '(?:^|\\/)' - - // > Otherwise, Git treats the pattern as a shell glob suitable for - // > consumption by fnmatch(3) - : '^'; -}], -// two globstars -[ -// Use lookahead assertions so that we could match more than one `'/**'` -/\\\/\\\*\\\*(?=\\\/|$)/g, -// Zero, one or several directories -// should not use '*', or it will be replaced by the next replacer - -// Check if it is not the last `'/**'` -function (_, index, str) { - return index + 6 < str.length - - // case: /**/ - // > A slash followed by two consecutive asterisks then a slash matches - // > zero or more directories. - // > For example, "a/**/b" matches "a/b", "a/x/b", "a/x/y/b" and so on. - // '/**/' - ? '(?:\\/[^\\/]+)*' - - // case: /** - // > A trailing `"/**"` matches everything inside. - - // #21: everything inside but it should not include the current folder - : '\\/.+'; -}], -// normal intermediate wildcards -[ -// Never replace escaped '*' -// ignore rule '\*' will match the path '*' - -// 'abc.*/' -> go -// 'abc.*' -> skip this rule, -// coz trailing single wildcard will be handed by [trailing wildcard] -/(^|[^\\]+)(\\\*)+(?=.+)/g, -// '*.js' matches '.js' -// '*.js' doesn't match 'abc' -function (_, p1, p2) { - // 1. - // > An asterisk "*" matches anything except a slash. - // 2. - // > Other consecutive asterisks are considered regular asterisks - // > and will match according to the previous rules. - var unescaped = p2.replace(/\\\*/g, '[^\\/]*'); - return p1 + unescaped; -}], [ -// unescape, revert step 3 except for back slash -// For example, if a user escape a '\\*', -// after step 3, the result will be '\\\\\\*' -/\\\\\\(?=[$.|*+(){^])/g, function () { - return ESCAPE; -}], [ -// '\\\\' -> '\\' -/\\\\/g, function () { - return ESCAPE; -}], [ -// > The range notation, e.g. [a-zA-Z], -// > can be used to match one of the characters in a range. - -// `\` is escaped by step 3 -/(\\)?\[([^\]/]*?)(\\*)($|\])/g, function (match, leadEscape, range, endEscape, close) { - return leadEscape === ESCAPE - // '\\[bar]' -> '\\\\[bar\\]' - ? "\\[".concat(range).concat(cleanRangeBackSlash(endEscape)).concat(close) : close === ']' ? endEscape.length % 2 === 0 - // A normal case, and it is a range notation - // '[bar]' - // '[bar\\\\]' - ? "[".concat(sanitizeRange(range)).concat(endEscape, "]") // Invalid range notaton - // '[bar\\]' -> '[bar\\\\]' - : '[]' : '[]'; -}], -// ending -[ -// 'js' will not match 'js.' -// 'ab' will not match 'abc' -/(?:[^*])$/, -// WTF! 
-// https://git-scm.com/docs/gitignore -// changes in [2.22.1](https://git-scm.com/docs/gitignore/2.22.1) -// which re-fixes #24, #38 - -// > If there is a separator at the end of the pattern then the pattern -// > will only match directories, otherwise the pattern can match both -// > files and directories. - -// 'js*' will not match 'a.js' -// 'js/' will not match 'a.js' -// 'js' will match 'a.js' and 'a.js/' -function (match) { - return /\/$/.test(match) - // foo/ will not match 'foo' - ? "".concat(match, "$") // foo matches 'foo' and 'foo/' - : "".concat(match, "(?=$|\\/$)"); -}], -// trailing wildcard -[/(\^|\\\/)?\\\*$/, function (_, p1) { - var prefix = p1 - // '\^': - // '/*' does not match EMPTY - // '/*' does not match everything - - // '\\\/': - // 'abc/*' does not match 'abc/' - ? "".concat(p1, "[^/]+") // 'a*' matches 'a' - // 'a*' matches 'aa' - : '[^/]*'; - return "".concat(prefix, "(?=$|\\/$)"); -}]]; - -// A simple cache, because an ignore rule only has only one certain meaning -var regexCache = Object.create(null); - -// @param {pattern} -var makeRegex = function makeRegex(pattern, ignoreCase) { - var source = regexCache[pattern]; - if (!source) { - source = REPLACERS.reduce(function (prev, _ref) { - var _ref2 = _slicedToArray(_ref, 2), - matcher = _ref2[0], - replacer = _ref2[1]; - return prev.replace(matcher, replacer.bind(pattern)); - }, pattern); - regexCache[pattern] = source; - } - return ignoreCase ? new RegExp(source, 'i') : new RegExp(source); -}; -var isString = function isString(subject) { - return typeof subject === 'string'; -}; - -// > A blank line matches no files, so it can serve as a separator for readability. -var checkPattern = function checkPattern(pattern) { - return pattern && isString(pattern) && !REGEX_TEST_BLANK_LINE.test(pattern) && !REGEX_INVALID_TRAILING_BACKSLASH.test(pattern) - - // > A line starting with # serves as a comment. - && pattern.indexOf('#') !== 0; -}; -var splitPattern = function splitPattern(pattern) { - return pattern.split(REGEX_SPLITALL_CRLF); -}; -var IgnoreRule = /*#__PURE__*/_createClass(function IgnoreRule(origin, pattern, negative, regex) { - _classCallCheck(this, IgnoreRule); - this.origin = origin; - this.pattern = pattern; - this.negative = negative; - this.regex = regex; -}); -var createRule = function createRule(pattern, ignoreCase) { - var origin = pattern; - var negative = false; - - // > An optional prefix "!" which negates the pattern; - if (pattern.indexOf('!') === 0) { - negative = true; - pattern = pattern.substr(1); - } - pattern = pattern - // > Put a backslash ("\") in front of the first "!" for patterns that - // > begin with a literal "!", for example, `"\!important!.txt"`. - .replace(REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION, '!') - // > Put a backslash ("\") in front of the first hash for patterns that - // > begin with a hash. 
- .replace(REGEX_REPLACE_LEADING_EXCAPED_HASH, '#'); - var regex = makeRegex(pattern, ignoreCase); - return new IgnoreRule(origin, pattern, negative, regex); -}; -var throwError = function throwError(message, Ctor) { - throw new Ctor(message); -}; -var checkPath = function checkPath(path, originalPath, doThrow) { - if (!isString(path)) { - return doThrow("path must be a string, but got `".concat(originalPath, "`"), TypeError); - } - - // We don't know if we should ignore EMPTY, so throw - if (!path) { - return doThrow("path must not be empty", TypeError); - } - - // Check if it is a relative path - if (checkPath.isNotRelative(path)) { - var r = '`path.relative()`d'; - return doThrow("path should be a ".concat(r, " string, but got \"").concat(originalPath, "\""), RangeError); - } - return true; -}; -var isNotRelative = function isNotRelative(path) { - return REGEX_TEST_INVALID_PATH.test(path); -}; -checkPath.isNotRelative = isNotRelative; -checkPath.convert = function (p) { - return p; -}; -var Ignore = /*#__PURE__*/function () { - function Ignore() { - var _ref3 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}, - _ref3$ignorecase = _ref3.ignorecase, - ignorecase = _ref3$ignorecase === void 0 ? true : _ref3$ignorecase, - _ref3$ignoreCase = _ref3.ignoreCase, - ignoreCase = _ref3$ignoreCase === void 0 ? ignorecase : _ref3$ignoreCase, - _ref3$allowRelativePa = _ref3.allowRelativePaths, - allowRelativePaths = _ref3$allowRelativePa === void 0 ? false : _ref3$allowRelativePa; - _classCallCheck(this, Ignore); - define(this, KEY_IGNORE, true); - this._rules = []; - this._ignoreCase = ignoreCase; - this._allowRelativePaths = allowRelativePaths; - this._initCache(); - } - _createClass(Ignore, [{ - key: "_initCache", - value: function _initCache() { - this._ignoreCache = Object.create(null); - this._testCache = Object.create(null); - } - }, { - key: "_addPattern", - value: function _addPattern(pattern) { - // #32 - if (pattern && pattern[KEY_IGNORE]) { - this._rules = this._rules.concat(pattern._rules); - this._added = true; - return; - } - if (checkPattern(pattern)) { - var rule = createRule(pattern, this._ignoreCase); - this._added = true; - this._rules.push(rule); - } - } - - // @param {Array | string | Ignore} pattern - }, { - key: "add", - value: function add(pattern) { - this._added = false; - makeArray(isString(pattern) ? splitPattern(pattern) : pattern).forEach(this._addPattern, this); - - // Some rules have just added to the ignore, - // making the behavior changed. - if (this._added) { - this._initCache(); - } - return this; - } - - // legacy - }, { - key: "addPattern", - value: function addPattern(pattern) { - return this.add(pattern); - } - - // | ignored : unignored - // negative | 0:0 | 0:1 | 1:0 | 1:1 - // -------- | ------- | ------- | ------- | -------- - // 0 | TEST | TEST | SKIP | X - // 1 | TESTIF | SKIP | TEST | X - - // - SKIP: always skip - // - TEST: always test - // - TESTIF: only test if checkUnignored - // - X: that never happen - - // @param {boolean} whether should check if the path is unignored, - // setting `checkUnignored` to `false` could reduce additional - // path matching. 
- - // @returns {TestResult} true if a file is ignored - }, { - key: "_testOne", - value: function _testOne(path, checkUnignored) { - var ignored = false; - var unignored = false; - this._rules.forEach(function (rule) { - var negative = rule.negative; - if (unignored === negative && ignored !== unignored || negative && !ignored && !unignored && !checkUnignored) { - return; - } - var matched = rule.regex.test(path); - if (matched) { - ignored = !negative; - unignored = negative; - } - }); - return { - ignored: ignored, - unignored: unignored - }; - } - - // @returns {TestResult} - }, { - key: "_test", - value: function _test(originalPath, cache, checkUnignored, slices) { - var path = originalPath - // Supports nullable path - && checkPath.convert(originalPath); - checkPath(path, originalPath, this._allowRelativePaths ? RETURN_FALSE : throwError); - return this._t(path, cache, checkUnignored, slices); - } - }, { - key: "_t", - value: function _t(path, cache, checkUnignored, slices) { - if (path in cache) { - return cache[path]; - } - if (!slices) { - // path/to/a.js - // ['path', 'to', 'a.js'] - slices = path.split(SLASH); - } - slices.pop(); - - // If the path has no parent directory, just test it - if (!slices.length) { - return cache[path] = this._testOne(path, checkUnignored); - } - var parent = this._t(slices.join(SLASH) + SLASH, cache, checkUnignored, slices); - - // If the path contains a parent directory, check the parent first - return cache[path] = parent.ignored - // > It is not possible to re-include a file if a parent directory of - // > that file is excluded. - ? parent : this._testOne(path, checkUnignored); - } - }, { - key: "ignores", - value: function ignores(path) { - return this._test(path, this._ignoreCache, false).ignored; - } - }, { - key: "createFilter", - value: function createFilter() { - var _this = this; - return function (path) { - return !_this.ignores(path); - }; - } - }, { - key: "filter", - value: function filter(paths) { - return makeArray(paths).filter(this.createFilter()); - } - - // @returns {TestResult} - }, { - key: "test", - value: function test(path) { - return this._test(path, this._testCache, true); - } - }]); - return Ignore; -}(); -var factory = function factory(options) { - return new Ignore(options); -}; -var isPathValid = function isPathValid(path) { - return checkPath(path && checkPath.convert(path), path, RETURN_FALSE); -}; -factory.isPathValid = isPathValid; - -// Fixes typescript -factory["default"] = factory; -module.exports = factory; - -// Windows -// -------------------------------------------------------------- -/* istanbul ignore if */ -if ( -// Detect `process` so that it can run in browsers. -typeof process !== 'undefined' && (process.env && process.env.IGNORE_TEST_WIN32 || process.platform === 'win32')) { - /* eslint no-control-regex: "off" */ - var makePosix = function makePosix(str) { - return /^\\\\\?\\/.test(str) || /[\0-\x1F"<>\|]+/.test(str) ? 
str : str.replace(/\\/g, '/'); - }; - checkPath.convert = makePosix; - - // 'C:\\foo' <- 'C:\\foo' has been converted to 'C:/' - // 'd:\\foo' - var REGIX_IS_WINDOWS_PATH_ABSOLUTE = /^[a-z]:\//i; - checkPath.isNotRelative = function (path) { - return REGIX_IS_WINDOWS_PATH_ABSOLUTE.test(path) || isNotRelative(path); - }; -} diff --git a/node_modules/ignore/package.json b/node_modules/ignore/package.json deleted file mode 100644 index b7f684e..0000000 --- a/node_modules/ignore/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "ignore", - "version": "5.3.2", - "description": "Ignore is a manager and filter for .gitignore rules, the one used by eslint, gitbook and many others.", - "files": [ - "legacy.js", - "index.js", - "index.d.ts", - "LICENSE-MIT" - ], - "scripts": { - "prepublishOnly": "npm run build", - "build": "babel -o legacy.js index.js", - "test:lint": "eslint .", - "test:tsc": "tsc ./test/ts/simple.ts --lib ES6", - "test:ts": "node ./test/ts/simple.js", - "tap": "tap --reporter classic", - "test:git": "npm run tap test/git-check-ignore.js", - "test:ignore": "npm run tap test/ignore.js", - "test:ignore:only": "IGNORE_ONLY_IGNORES=1 npm run tap test/ignore.js", - "test:others": "npm run tap test/others.js", - "test:cases": "npm run tap test/*.js -- --coverage", - "test:no-coverage": "npm run tap test/*.js -- --no-check-coverage", - "test:only": "npm run test:lint && npm run test:tsc && npm run test:ts && npm run test:cases", - "test": "npm run test:only", - "test:win32": "IGNORE_TEST_WIN32=1 npm run test", - "report": "tap --coverage-report=html", - "posttest": "npm run report && codecov" - }, - "repository": { - "type": "git", - "url": "git@github.com:kaelzhang/node-ignore.git" - }, - "keywords": [ - "ignore", - ".gitignore", - "gitignore", - "npmignore", - "rules", - "manager", - "filter", - "regexp", - "regex", - "fnmatch", - "glob", - "asterisks", - "regular-expression" - ], - "author": "kael", - "license": "MIT", - "bugs": { - "url": "https://github.com/kaelzhang/node-ignore/issues" - }, - "devDependencies": { - "@babel/cli": "^7.22.9", - "@babel/core": "^7.22.9", - "@babel/preset-env": "^7.22.9", - "codecov": "^3.8.2", - "debug": "^4.3.4", - "eslint": "^8.46.0", - "eslint-config-ostai": "^3.0.0", - "eslint-plugin-import": "^2.28.0", - "mkdirp": "^3.0.1", - "pre-suf": "^1.1.1", - "rimraf": "^6.0.1", - "spawn-sync": "^2.0.0", - "tap": "^16.3.9", - "tmp": "0.2.3", - "typescript": "^5.1.6" - }, - "engines": { - "node": ">= 4" - } -} diff --git a/node_modules/is-binary-path/index.d.ts b/node_modules/is-binary-path/index.d.ts deleted file mode 100644 index 19dcd43..0000000 --- a/node_modules/is-binary-path/index.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** -Check if a file path is a binary file. 
- -@example -``` -import isBinaryPath = require('is-binary-path'); - -isBinaryPath('source/unicorn.png'); -//=> true - -isBinaryPath('source/unicorn.txt'); -//=> false -``` -*/ -declare function isBinaryPath(filePath: string): boolean; - -export = isBinaryPath; diff --git a/node_modules/is-binary-path/index.js b/node_modules/is-binary-path/index.js deleted file mode 100644 index ef7548c..0000000 --- a/node_modules/is-binary-path/index.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; -const path = require('path'); -const binaryExtensions = require('binary-extensions'); - -const extensions = new Set(binaryExtensions); - -module.exports = filePath => extensions.has(path.extname(filePath).slice(1).toLowerCase()); diff --git a/node_modules/is-binary-path/license b/node_modules/is-binary-path/license deleted file mode 100644 index 401b1c7..0000000 --- a/node_modules/is-binary-path/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) 2019 Sindre Sorhus (https://sindresorhus.com), Paul Miller (https://paulmillr.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
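
The one-line `is-binary-path` implementation above packs several steps together. The following sketch restates the same logic with the steps separated and commented; it adds nothing new and assumes, as in the original module, that the `binary-extensions` list is installed.

```js
'use strict';
const path = require('path');
const binaryExtensions = require('binary-extensions');

// Build a Set once so each lookup of a known binary extension is O(1)
const extensions = new Set(binaryExtensions);

module.exports = filePath => {
	const ext = path.extname(filePath) // '.PNG' for 'unicorn.PNG', '' if none
		.slice(1)                        // drop the leading dot -> 'PNG'
		.toLowerCase();                  // the extension list is lowercase -> 'png'
	return extensions.has(ext);        // true only for known binary extensions
};
```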
diff --git a/node_modules/is-binary-path/package.json b/node_modules/is-binary-path/package.json deleted file mode 100644 index a8d005a..0000000 --- a/node_modules/is-binary-path/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "is-binary-path", - "version": "2.1.0", - "description": "Check if a file path is a binary file", - "license": "MIT", - "repository": "sindresorhus/is-binary-path", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "binary", - "extensions", - "extension", - "file", - "path", - "check", - "detect", - "is" - ], - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/is-binary-path/readme.md b/node_modules/is-binary-path/readme.md deleted file mode 100644 index b4ab025..0000000 --- a/node_modules/is-binary-path/readme.md +++ /dev/null @@ -1,34 +0,0 @@ -# is-binary-path [![Build Status](https://travis-ci.org/sindresorhus/is-binary-path.svg?branch=master)](https://travis-ci.org/sindresorhus/is-binary-path) - -> Check if a file path is a binary file - - -## Install - -``` -$ npm install is-binary-path -``` - - -## Usage - -```js -const isBinaryPath = require('is-binary-path'); - -isBinaryPath('source/unicorn.png'); -//=> true - -isBinaryPath('source/unicorn.txt'); -//=> false -``` - - -## Related - -- [binary-extensions](https://github.com/sindresorhus/binary-extensions) - List of binary file extensions -- [is-text-path](https://github.com/sindresorhus/is-text-path) - Check if a filepath is a text file - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com), [Paul Miller](https://paulmillr.com) diff --git a/node_modules/is-extglob/LICENSE b/node_modules/is-extglob/LICENSE deleted file mode 100644 index 842218c..0000000 --- a/node_modules/is-extglob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2016, Jon Schlinkert - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/is-extglob/README.md b/node_modules/is-extglob/README.md deleted file mode 100644 index 0416af5..0000000 --- a/node_modules/is-extglob/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) - -> Returns true if a string has an extglob. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-extglob -``` - -## Usage - -```js -var isExtglob = require('is-extglob'); -``` - -**True** - -```js -isExtglob('?(abc)'); -isExtglob('@(abc)'); -isExtglob('!(abc)'); -isExtglob('*(abc)'); -isExtglob('+(abc)'); -``` - -**False** - -Escaped extglobs: - -```js -isExtglob('\\?(abc)'); -isExtglob('\\@(abc)'); -isExtglob('\\!(abc)'); -isExtglob('\\*(abc)'); -isExtglob('\\+(abc)'); -``` - -Everything else... - -```js -isExtglob('foo.js'); -isExtglob('!foo.js'); -isExtglob('*.js'); -isExtglob('**/abc.js'); -isExtglob('abc/*.js'); -isExtglob('abc/(aaa|bbb).js'); -isExtglob('abc/[a-z].js'); -isExtglob('abc/{a,b}.js'); -isExtglob('abc/?.js'); -isExtglob('abc.js'); -isExtglob('abc/def/ghi.js'); -``` - -## History - -**v2.0** - -Adds support for escaping. Escaped exglobs no longer return true. - -## About - -### Related projects - -* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") -* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") -* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") - -### Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -### Building docs - -_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ - -To generate the readme and API documentation with [verb](https://github.com/verbose/verb): - -```sh -$ npm install -g verb verb-generate-readme && verb -``` - -### Running tests - -Install dev dependencies: - -```sh -$ npm install -d && npm test -``` - -### Author - -**Jon Schlinkert** - -* [github/jonschlinkert](https://github.com/jonschlinkert) -* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) - -### License - -Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). 
-Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/is-extglob/index.js b/node_modules/is-extglob/index.js deleted file mode 100644 index c1d986f..0000000 --- a/node_modules/is-extglob/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/*! - * is-extglob - * - * Copyright (c) 2014-2016, Jon Schlinkert. - * Licensed under the MIT License. - */ - -module.exports = function isExtglob(str) { - if (typeof str !== 'string' || str === '') { - return false; - } - - var match; - while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { - if (match[2]) return true; - str = str.slice(match.index + match[0].length); - } - - return false; -}; diff --git a/node_modules/is-extglob/package.json b/node_modules/is-extglob/package.json deleted file mode 100644 index 7a90836..0000000 --- a/node_modules/is-extglob/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "is-extglob", - "description": "Returns true if a string has an extglob.", - "version": "2.1.1", - "homepage": "https://github.com/jonschlinkert/is-extglob", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "repository": "jonschlinkert/is-extglob", - "bugs": { - "url": "https://github.com/jonschlinkert/is-extglob/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "mocha" - }, - "devDependencies": { - "gulp-format-md": "^0.1.10", - "mocha": "^3.0.2" - }, - "keywords": [ - "bash", - "braces", - "check", - "exec", - "expression", - "extglob", - "glob", - "globbing", - "globstar", - "is", - "match", - "matches", - "pattern", - "regex", - "regular", - "string", - "test" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "related": { - "list": [ - "has-glob", - "is-glob", - "micromatch" - ] - }, - "reflinks": [ - "verb", - "verb-generate-readme" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/is-fullwidth-code-point/index.d.ts b/node_modules/is-fullwidth-code-point/index.d.ts deleted file mode 100644 index 729d202..0000000 --- a/node_modules/is-fullwidth-code-point/index.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** -Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms). - -@param codePoint - The [code point](https://en.wikipedia.org/wiki/Code_point) of a character. 
- -@example -``` -import isFullwidthCodePoint from 'is-fullwidth-code-point'; - -isFullwidthCodePoint('谢'.codePointAt(0)); -//=> true - -isFullwidthCodePoint('a'.codePointAt(0)); -//=> false -``` -*/ -export default function isFullwidthCodePoint(codePoint: number): boolean; diff --git a/node_modules/is-fullwidth-code-point/index.js b/node_modules/is-fullwidth-code-point/index.js deleted file mode 100644 index 671f97f..0000000 --- a/node_modules/is-fullwidth-code-point/index.js +++ /dev/null @@ -1,50 +0,0 @@ -/* eslint-disable yoda */ -'use strict'; - -const isFullwidthCodePoint = codePoint => { - if (Number.isNaN(codePoint)) { - return false; - } - - // Code points are derived from: - // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt - if ( - codePoint >= 0x1100 && ( - codePoint <= 0x115F || // Hangul Jamo - codePoint === 0x2329 || // LEFT-POINTING ANGLE BRACKET - codePoint === 0x232A || // RIGHT-POINTING ANGLE BRACKET - // CJK Radicals Supplement .. Enclosed CJK Letters and Months - (0x2E80 <= codePoint && codePoint <= 0x3247 && codePoint !== 0x303F) || - // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A - (0x3250 <= codePoint && codePoint <= 0x4DBF) || - // CJK Unified Ideographs .. Yi Radicals - (0x4E00 <= codePoint && codePoint <= 0xA4C6) || - // Hangul Jamo Extended-A - (0xA960 <= codePoint && codePoint <= 0xA97C) || - // Hangul Syllables - (0xAC00 <= codePoint && codePoint <= 0xD7A3) || - // CJK Compatibility Ideographs - (0xF900 <= codePoint && codePoint <= 0xFAFF) || - // Vertical Forms - (0xFE10 <= codePoint && codePoint <= 0xFE19) || - // CJK Compatibility Forms .. Small Form Variants - (0xFE30 <= codePoint && codePoint <= 0xFE6B) || - // Halfwidth and Fullwidth Forms - (0xFF01 <= codePoint && codePoint <= 0xFF60) || - (0xFFE0 <= codePoint && codePoint <= 0xFFE6) || - // Kana Supplement - (0x1B000 <= codePoint && codePoint <= 0x1B001) || - // Enclosed Ideographic Supplement - (0x1F200 <= codePoint && codePoint <= 0x1F251) || - // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane - (0x20000 <= codePoint && codePoint <= 0x3FFFD) - ) - ) { - return true; - } - - return false; -}; - -module.exports = isFullwidthCodePoint; -module.exports.default = isFullwidthCodePoint; diff --git a/node_modules/is-fullwidth-code-point/license b/node_modules/is-fullwidth-code-point/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/is-fullwidth-code-point/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-fullwidth-code-point/package.json b/node_modules/is-fullwidth-code-point/package.json deleted file mode 100644 index 2137e88..0000000 --- a/node_modules/is-fullwidth-code-point/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "is-fullwidth-code-point", - "version": "3.0.0", - "description": "Check if the character represented by a given Unicode code point is fullwidth", - "license": "MIT", - "repository": "sindresorhus/is-fullwidth-code-point", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd-check" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "fullwidth", - "full-width", - "full", - "width", - "unicode", - "character", - "string", - "codepoint", - "code", - "point", - "is", - "detect", - "check" - ], - "devDependencies": { - "ava": "^1.3.1", - "tsd-check": "^0.5.0", - "xo": "^0.24.0" - } -} diff --git a/node_modules/is-fullwidth-code-point/readme.md b/node_modules/is-fullwidth-code-point/readme.md deleted file mode 100644 index 4236bba..0000000 --- a/node_modules/is-fullwidth-code-point/readme.md +++ /dev/null @@ -1,39 +0,0 @@ -# is-fullwidth-code-point [![Build Status](https://travis-ci.org/sindresorhus/is-fullwidth-code-point.svg?branch=master)](https://travis-ci.org/sindresorhus/is-fullwidth-code-point) - -> Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) - - -## Install - -``` -$ npm install is-fullwidth-code-point -``` - - -## Usage - -```js -const isFullwidthCodePoint = require('is-fullwidth-code-point'); - -isFullwidthCodePoint('谢'.codePointAt(0)); -//=> true - -isFullwidthCodePoint('a'.codePointAt(0)); -//=> false -``` - - -## API - -### isFullwidthCodePoint(codePoint) - -#### codePoint - -Type: `number` - -The [code point](https://en.wikipedia.org/wiki/Code_point) of a character. - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/is-glob/LICENSE b/node_modules/is-glob/LICENSE deleted file mode 100644 index 3f2eca1..0000000 --- a/node_modules/is-glob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/is-glob/README.md b/node_modules/is-glob/README.md deleted file mode 100644 index 740724b..0000000 --- a/node_modules/is-glob/README.md +++ /dev/null @@ -1,206 +0,0 @@ -# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/github/workflow/status/micromatch/is-glob/dev)](https://github.com/micromatch/is-glob/actions) - -> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-glob -``` - -You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). - -## Usage - -```js -var isGlob = require('is-glob'); -``` - -### Default behavior - -**True** - -Patterns that have glob characters or regex patterns will return `true`: - -```js -isGlob('!foo.js'); -isGlob('*.js'); -isGlob('**/abc.js'); -isGlob('abc/*.js'); -isGlob('abc/(aaa|bbb).js'); -isGlob('abc/[a-z].js'); -isGlob('abc/{a,b}.js'); -//=> true -``` - -Extglobs - -```js -isGlob('abc/@(a).js'); -isGlob('abc/!(a).js'); -isGlob('abc/+(a).js'); -isGlob('abc/*(a).js'); -isGlob('abc/?(a).js'); -//=> true -``` - -**False** - -Escaped globs or extglobs return `false`: - -```js -isGlob('abc/\\@(a).js'); -isGlob('abc/\\!(a).js'); -isGlob('abc/\\+(a).js'); -isGlob('abc/\\*(a).js'); -isGlob('abc/\\?(a).js'); -isGlob('\\!foo.js'); -isGlob('\\*.js'); -isGlob('\\*\\*/abc.js'); -isGlob('abc/\\*.js'); -isGlob('abc/\\(aaa|bbb).js'); -isGlob('abc/\\[a-z].js'); -isGlob('abc/\\{a,b}.js'); -//=> false -``` - -Patterns that do not have glob patterns return `false`: - -```js -isGlob('abc.js'); -isGlob('abc/def/ghi.js'); -isGlob('foo.js'); -isGlob('abc/@.js'); -isGlob('abc/+.js'); -isGlob('abc/?.js'); -isGlob(); -isGlob(null); -//=> false -``` - -Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): - -```js -isGlob(['**/*.js']); -isGlob(['foo.js']); -//=> false -``` - -### Option strict - -When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that -some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. 
- -**True** - -Patterns that have glob characters or regex patterns will return `true`: - -```js -isGlob('!foo.js', {strict: false}); -isGlob('*.js', {strict: false}); -isGlob('**/abc.js', {strict: false}); -isGlob('abc/*.js', {strict: false}); -isGlob('abc/(aaa|bbb).js', {strict: false}); -isGlob('abc/[a-z].js', {strict: false}); -isGlob('abc/{a,b}.js', {strict: false}); -//=> true -``` - -Extglobs - -```js -isGlob('abc/@(a).js', {strict: false}); -isGlob('abc/!(a).js', {strict: false}); -isGlob('abc/+(a).js', {strict: false}); -isGlob('abc/*(a).js', {strict: false}); -isGlob('abc/?(a).js', {strict: false}); -//=> true -``` - -**False** - -Escaped globs or extglobs return `false`: - -```js -isGlob('\\!foo.js', {strict: false}); -isGlob('\\*.js', {strict: false}); -isGlob('\\*\\*/abc.js', {strict: false}); -isGlob('abc/\\*.js', {strict: false}); -isGlob('abc/\\(aaa|bbb).js', {strict: false}); -isGlob('abc/\\[a-z].js', {strict: false}); -isGlob('abc/\\{a,b}.js', {strict: false}); -//=> false -``` - -## About - -
-### Contributing
-
-Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
-
-### Running Tests
-
-Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
-
-```sh
-$ npm install && npm test
-```
-
-### Building docs
-
-_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
-
-To generate the readme, run the following command:
-
-```sh
-$ npm install -g verbose/verb#dev verb-generate-readme && verb
-```
- -### Related projects - -You might also be interested in these projects: - -* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") -* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") -* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") -* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 47 | [jonschlinkert](https://github.com/jonschlinkert) | -| 5 | [doowb](https://github.com/doowb) | -| 1 | [phated](https://github.com/phated) | -| 1 | [danhper](https://github.com/danhper) | -| 1 | [paulmillr](https://github.com/paulmillr) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/is-glob/index.js b/node_modules/is-glob/index.js deleted file mode 100644 index 620f563..0000000 --- a/node_modules/is-glob/index.js +++ /dev/null @@ -1,150 +0,0 @@ -/*! - * is-glob - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -var isExtglob = require('is-extglob'); -var chars = { '{': '}', '(': ')', '[': ']'}; -var strictCheck = function(str) { - if (str[0] === '!') { - return true; - } - var index = 0; - var pipeIndex = -2; - var closeSquareIndex = -2; - var closeCurlyIndex = -2; - var closeParenIndex = -2; - var backSlashIndex = -2; - while (index < str.length) { - if (str[index] === '*') { - return true; - } - - if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { - return true; - } - - if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { - if (closeSquareIndex < index) { - closeSquareIndex = str.indexOf(']', index); - } - if (closeSquareIndex > index) { - if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { - return true; - } - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { - return true; - } - } - } - - if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { - closeCurlyIndex = str.indexOf('}', index); - if (closeCurlyIndex > index) { - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { - return true; - } - } - } - - if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { - closeParenIndex = str.indexOf(')', index); - if (closeParenIndex > index) { - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { - return true; - } - } - } - - if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { - if (pipeIndex < index) { - pipeIndex = str.indexOf('|', index); - } - if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { - closeParenIndex = str.indexOf(')', pipeIndex); - if (closeParenIndex > pipeIndex) { - backSlashIndex = str.indexOf('\\', pipeIndex); - if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { - return true; - } - } - } - } - - if (str[index] === '\\') { - var open = str[index + 1]; - index += 2; - var close = chars[open]; - - if (close) { - var n = str.indexOf(close, index); - if (n !== -1) { - index = n + 1; - } - } - - if (str[index] === '!') { - return true; - } - } else { - index++; - } - } - return false; -}; - -var relaxedCheck = function(str) { - if (str[0] === '!') { - return true; - } - var index = 0; - while (index < str.length) { - if (/[*?{}()[\]]/.test(str[index])) { - return true; - } - - if (str[index] === '\\') { - var open = str[index + 1]; - index += 2; - var close = chars[open]; - - if (close) { - var n = str.indexOf(close, index); - if (n !== -1) { - index = n + 1; - } - } - - if (str[index] === '!') { - return true; - } - } else { - index++; - } - } - return false; -}; - -module.exports = function isGlob(str, options) { - if (typeof str !== 'string' || str === '') { - return false; - } - - if (isExtglob(str)) { - return true; - } - - var check = strictCheck; - - // optionally relax check - if (options && options.strict === false) { - check = relaxedCheck; - } - - return check(str); -}; diff --git a/node_modules/is-glob/package.json b/node_modules/is-glob/package.json deleted file mode 100644 index 858af03..0000000 --- a/node_modules/is-glob/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "is-glob", - "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", - "version": "4.0.3", - "homepage": "https://github.com/micromatch/is-glob", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Brian Woodward (https://twitter.com/doowb)", - "Daniel Perez (https://tuvistavie.com)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)" - ], - "repository": "micromatch/is-glob", - "bugs": { - "url": "https://github.com/micromatch/is-glob/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "mocha && node benchmark.js" - }, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "devDependencies": { - "gulp-format-md": "^0.1.10", - "mocha": "^3.0.2" - }, - "keywords": [ - "bash", - "braces", - "check", - "exec", - "expression", - "extglob", - "glob", - "globbing", - "globstar", - "is", - "match", - "matches", - "pattern", - "regex", - "regular", - "string", - "test" - ], - "verb": { - "layout": "default", - "plugins": [ - "gulp-format-md" - ], - "related": { - "list": [ - "assemble", - "base", - "update", - "verb" - ] - }, - "reflinks": [ - "assemble", - "bach", - "base", - "composer", - "gulp", - "has-glob", - "is-valid-glob", - "micromatch", - "npm", - "scaffold", - "verb", - "vinyl" - ] - } -} diff --git a/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/is-number/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/is-number/README.md b/node_modules/is-number/README.md deleted file mode 100644 index eb8149e..0000000 --- a/node_modules/is-number/README.md +++ /dev/null @@ -1,187 +0,0 @@ -# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![NPM total downloads](https://img.shields.io/npm/dt/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-number) - -> Returns true if the value is a finite number. 
- -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-number -``` - -## Why is this needed? - -In JavaScript, it's not always as straightforward as it should be to reliably check if a value is a number. It's common for devs to use `+`, `-`, or `Number()` to cast a string value to a number (for example, when values are returned from user input, regex matches, parsers, etc). But there are many non-intuitive edge cases that yield unexpected results: - -```js -console.log(+[]); //=> 0 -console.log(+''); //=> 0 -console.log(+' '); //=> 0 -console.log(typeof NaN); //=> 'number' -``` - -This library offers a performant way to smooth out edge cases like these. - -## Usage - -```js -const isNumber = require('is-number'); -``` - -See the [tests](./test.js) for more examples. - -### true - -```js -isNumber(5e3); // true -isNumber(0xff); // true -isNumber(-1.1); // true -isNumber(0); // true -isNumber(1); // true -isNumber(1.1); // true -isNumber(10); // true -isNumber(10.10); // true -isNumber(100); // true -isNumber('-1.1'); // true -isNumber('0'); // true -isNumber('012'); // true -isNumber('0xff'); // true -isNumber('1'); // true -isNumber('1.1'); // true -isNumber('10'); // true -isNumber('10.10'); // true -isNumber('100'); // true -isNumber('5e3'); // true -isNumber(parseInt('012')); // true -isNumber(parseFloat('012')); // true -``` - -### False - -Everything else is false, as you would expect: - -```js -isNumber(Infinity); // false -isNumber(NaN); // false -isNumber(null); // false -isNumber(undefined); // false -isNumber(''); // false -isNumber(' '); // false -isNumber('foo'); // false -isNumber([1]); // false -isNumber([]); // false -isNumber(function () {}); // false -isNumber({}); // false -``` - -## Release history - -### 7.0.0 - -* Refactor. Now uses `.isFinite` if it exists. -* Performance is about the same as v6.0 when the value is a string or number. But it's now 3x-4x faster when the value is not a string or number. - -### 6.0.0 - -* Optimizations, thanks to @benaadams. - -### 5.0.0 - -**Breaking changes** - -* removed support for `instanceof Number` and `instanceof String` - -## Benchmarks - -As with all benchmarks, take these with a grain of salt. See the [benchmarks](./benchmark/index.js) for more detail. - -``` -# all -v7.0 x 413,222 ops/sec ±2.02% (86 runs sampled) -v6.0 x 111,061 ops/sec ±1.29% (85 runs sampled) -parseFloat x 317,596 ops/sec ±1.36% (86 runs sampled) -fastest is 'v7.0' - -# string -v7.0 x 3,054,496 ops/sec ±1.05% (89 runs sampled) -v6.0 x 2,957,781 ops/sec ±0.98% (88 runs sampled) -parseFloat x 3,071,060 ops/sec ±1.13% (88 runs sampled) -fastest is 'parseFloat,v7.0' - -# number -v7.0 x 3,146,895 ops/sec ±0.89% (89 runs sampled) -v6.0 x 3,214,038 ops/sec ±1.07% (89 runs sampled) -parseFloat x 3,077,588 ops/sec ±1.07% (87 runs sampled) -fastest is 'v6.0' -``` - -## About - -
-### Contributing
-
-Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
-
-### Running Tests
-
-Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
-
-```sh
-$ npm install && npm test
-```
-
-### Building docs
-
-_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
-
-To generate the readme, run the following command:
-
-```sh
-$ npm install -g verbose/verb#dev verb-generate-readme && verb
-```
- -### Related projects - -You might also be interested in these projects: - -* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") -* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") -* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") -* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 49 | [jonschlinkert](https://github.com/jonschlinkert) | -| 5 | [charlike-old](https://github.com/charlike-old) | -| 1 | [benaadams](https://github.com/benaadams) | -| 1 | [realityking](https://github.com/realityking) | - -### Author - -**Jon Schlinkert** - -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) - -### License - -Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 15, 2018._ \ No newline at end of file diff --git a/node_modules/is-number/index.js b/node_modules/is-number/index.js deleted file mode 100644 index 27f19b7..0000000 --- a/node_modules/is-number/index.js +++ /dev/null @@ -1,18 +0,0 @@ -/*! - * is-number - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Released under the MIT License. - */ - -'use strict'; - -module.exports = function(num) { - if (typeof num === 'number') { - return num - num === 0; - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); - } - return false; -}; diff --git a/node_modules/is-number/package.json b/node_modules/is-number/package.json deleted file mode 100644 index 3715072..0000000 --- a/node_modules/is-number/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "is-number", - "description": "Returns true if a number or string value is a finite number. 
Useful for regex matches, parsing, user input, etc.", - "version": "7.0.0", - "homepage": "https://github.com/jonschlinkert/is-number", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Olsten Larck (https://i.am.charlike.online)", - "Rouven Weßling (www.rouvenwessling.de)" - ], - "repository": "jonschlinkert/is-number", - "bugs": { - "url": "https://github.com/jonschlinkert/is-number/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.12.0" - }, - "scripts": { - "test": "mocha" - }, - "devDependencies": { - "ansi": "^0.3.1", - "benchmark": "^2.1.4", - "gulp-format-md": "^1.0.0", - "mocha": "^3.5.3" - }, - "keywords": [ - "cast", - "check", - "coerce", - "coercion", - "finite", - "integer", - "is", - "isnan", - "is-nan", - "is-num", - "is-number", - "isnumber", - "isfinite", - "istype", - "kind", - "math", - "nan", - "num", - "number", - "numeric", - "parseFloat", - "parseInt", - "test", - "type", - "typeof", - "value" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "related": { - "list": [ - "is-plain-object", - "is-primitive", - "isobject", - "kind-of" - ] - }, - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/jsonfile/CHANGELOG.md b/node_modules/jsonfile/CHANGELOG.md deleted file mode 100644 index d772e43..0000000 --- a/node_modules/jsonfile/CHANGELOG.md +++ /dev/null @@ -1,171 +0,0 @@ -6.1.0 / 2020-10-31 ------------------- - -- Add `finalEOL` option to disable writing final EOL ([#115](https://github.com/jprichardson/node-jsonfile/issues/115), [#137](https://github.com/jprichardson/node-jsonfile/pull/137)) -- Update dependency ([#138](https://github.com/jprichardson/node-jsonfile/pull/138)) - -6.0.1 / 2020-03-07 ------------------- - -- Update dependency ([#130](https://github.com/jprichardson/node-jsonfile/pull/130)) -- Fix code style ([#129](https://github.com/jprichardson/node-jsonfile/pull/129)) - -6.0.0 / 2020-02-24 ------------------- - -- **BREAKING:** Drop support for Node 6 & 8 ([#128](https://github.com/jprichardson/node-jsonfile/pull/128)) -- **BREAKING:** Do not allow passing `null` as options to `readFile()` or `writeFile()` ([#128](https://github.com/jprichardson/node-jsonfile/pull/128)) -- Refactor internals ([#128](https://github.com/jprichardson/node-jsonfile/pull/128)) - -5.0.0 / 2018-09-08 ------------------- - -- **BREAKING:** Drop Node 4 support -- **BREAKING:** If no callback is passed to an asynchronous method, a promise is now returned ([#109](https://github.com/jprichardson/node-jsonfile/pull/109)) -- Cleanup docs - -4.0.0 / 2017-07-12 ------------------- - -- **BREAKING:** Remove global `spaces` option. -- **BREAKING:** Drop support for Node 0.10, 0.12, and io.js. -- Remove undocumented `passParsingErrors` option. -- Added `EOL` override option to `writeFile` when using `spaces`. [#89] - -3.0.1 / 2017-07-05 ------------------- - -- Fixed bug in `writeFile` when there was a serialization error & no callback was passed. In previous versions, an empty file would be written; now no file is written. 
- -3.0.0 / 2017-04-25 ------------------- - -- Changed behavior of `throws` option for `readFileSync`; now does not throw filesystem errors when `throws` is `false` - -2.4.0 / 2016-09-15 ------------------- -### Changed -- added optional support for `graceful-fs` [#62] - -2.3.1 / 2016-05-13 ------------------- -- fix to support BOM. [#45][#45] - -2.3.0 / 2016-04-16 ------------------- -- add `throws` to `readFile()`. See [#39][#39] -- add support for any arbitrary `fs` module. Useful with [mock-fs](https://www.npmjs.com/package/mock-fs) - -2.2.3 / 2015-10-14 ------------------- -- include file name in parse error. See: https://github.com/jprichardson/node-jsonfile/pull/34 - -2.2.2 / 2015-09-16 ------------------- -- split out tests into separate files -- fixed `throws` when set to `true` in `readFileSync()`. See: https://github.com/jprichardson/node-jsonfile/pull/33 - -2.2.1 / 2015-06-25 ------------------- -- fixed regression when passing in string as encoding for options in `writeFile()` and `writeFileSync()`. See: https://github.com/jprichardson/node-jsonfile/issues/28 - -2.2.0 / 2015-06-25 ------------------- -- added `options.spaces` to `writeFile()` and `writeFileSync()` - -2.1.2 / 2015-06-22 ------------------- -- fixed if passed `readFileSync(file, 'utf8')`. See: https://github.com/jprichardson/node-jsonfile/issues/25 - -2.1.1 / 2015-06-19 ------------------- -- fixed regressions if `null` is passed for options. See: https://github.com/jprichardson/node-jsonfile/issues/24 - -2.1.0 / 2015-06-19 ------------------- -- cleanup: JavaScript Standard Style, rename files, dropped terst for assert -- methods now support JSON revivers/replacers - -2.0.1 / 2015-05-24 ------------------- -- update license attribute https://github.com/jprichardson/node-jsonfile/pull/21 - -2.0.0 / 2014-07-28 ------------------- -* added `\n` to end of file on write. [#14](https://github.com/jprichardson/node-jsonfile/pull/14) -* added `options.throws` to `readFileSync()` -* dropped support for Node v0.8 - -1.2.0 / 2014-06-29 ------------------- -* removed semicolons -* bugfix: passed `options` to `fs.readFile` and `fs.readFileSync`. This technically changes behavior, but -changes it according to docs. [#12][#12] - -1.1.1 / 2013-11-11 ------------------- -* fixed catching of callback bug (ffissore / #5) - -1.1.0 / 2013-10-11 ------------------- -* added `options` param to methods, (seanodell / #4) - -1.0.1 / 2013-09-05 ------------------- -* removed `homepage` field from package.json to remove NPM warning - -1.0.0 / 2013-06-28 ------------------- -* added `.npmignore`, #1 -* changed spacing default from `4` to `2` to follow Node conventions - -0.0.1 / 2012-09-10 ------------------- -* Initial release. 
- -[#89]: https://github.com/jprichardson/node-jsonfile/pull/89 -[#45]: https://github.com/jprichardson/node-jsonfile/issues/45 "Reading of UTF8-encoded (w/ BOM) files fails" -[#44]: https://github.com/jprichardson/node-jsonfile/issues/44 "Extra characters in written file" -[#43]: https://github.com/jprichardson/node-jsonfile/issues/43 "Prettyfy json when written to file" -[#42]: https://github.com/jprichardson/node-jsonfile/pull/42 "Moved fs.readFileSync within the try/catch" -[#41]: https://github.com/jprichardson/node-jsonfile/issues/41 "Linux: Hidden file not working" -[#40]: https://github.com/jprichardson/node-jsonfile/issues/40 "autocreate folder doesn't work from Path-value" -[#39]: https://github.com/jprichardson/node-jsonfile/pull/39 "Add `throws` option for readFile (async)" -[#38]: https://github.com/jprichardson/node-jsonfile/pull/38 "Update README.md writeFile[Sync] signature" -[#37]: https://github.com/jprichardson/node-jsonfile/pull/37 "support append file" -[#36]: https://github.com/jprichardson/node-jsonfile/pull/36 "Add typescript definition file." -[#35]: https://github.com/jprichardson/node-jsonfile/pull/35 "Add typescript definition file." -[#34]: https://github.com/jprichardson/node-jsonfile/pull/34 "readFile JSON parse error includes filename" -[#33]: https://github.com/jprichardson/node-jsonfile/pull/33 "fix throw->throws typo in readFileSync()" -[#32]: https://github.com/jprichardson/node-jsonfile/issues/32 "readFile & readFileSync can possible have strip-comments as an option?" -[#31]: https://github.com/jprichardson/node-jsonfile/pull/31 "[Modify] Support string include is unicode escape string" -[#30]: https://github.com/jprichardson/node-jsonfile/issues/30 "How to use Jsonfile package in Meteor.js App?" -[#29]: https://github.com/jprichardson/node-jsonfile/issues/29 "writefile callback if no error?" 
-[#28]: https://github.com/jprichardson/node-jsonfile/issues/28 "writeFile options argument broken " -[#27]: https://github.com/jprichardson/node-jsonfile/pull/27 "Use svg instead of png to get better image quality" -[#26]: https://github.com/jprichardson/node-jsonfile/issues/26 "Breaking change to fs-extra" -[#25]: https://github.com/jprichardson/node-jsonfile/issues/25 "support string encoding param for read methods" -[#24]: https://github.com/jprichardson/node-jsonfile/issues/24 "readFile: Passing in null options with a callback throws an error" -[#23]: https://github.com/jprichardson/node-jsonfile/pull/23 "Add appendFile and appendFileSync" -[#22]: https://github.com/jprichardson/node-jsonfile/issues/22 "Default value for spaces in readme.md is outdated" -[#21]: https://github.com/jprichardson/node-jsonfile/pull/21 "Update license attribute" -[#20]: https://github.com/jprichardson/node-jsonfile/issues/20 "Add simple caching functionallity" -[#19]: https://github.com/jprichardson/node-jsonfile/pull/19 "Add appendFileSync method" -[#18]: https://github.com/jprichardson/node-jsonfile/issues/18 "Add updateFile and updateFileSync methods" -[#17]: https://github.com/jprichardson/node-jsonfile/issues/17 "seem read & write sync has sequentially problem" -[#16]: https://github.com/jprichardson/node-jsonfile/pull/16 "export spaces defaulted to null" -[#15]: https://github.com/jprichardson/node-jsonfile/issues/15 "`jsonfile.spaces` should default to `null`" -[#14]: https://github.com/jprichardson/node-jsonfile/pull/14 "Add EOL at EOF" -[#13]: https://github.com/jprichardson/node-jsonfile/issues/13 "Add a final newline" -[#12]: https://github.com/jprichardson/node-jsonfile/issues/12 "readFile doesn't accept options" -[#11]: https://github.com/jprichardson/node-jsonfile/pull/11 "Added try,catch to readFileSync" -[#10]: https://github.com/jprichardson/node-jsonfile/issues/10 "No output or error from writeFile" -[#9]: https://github.com/jprichardson/node-jsonfile/pull/9 "Change 'js' to 'jf' in example." -[#8]: https://github.com/jprichardson/node-jsonfile/pull/8 "Updated forgotten module.exports to me." -[#7]: https://github.com/jprichardson/node-jsonfile/pull/7 "Add file name in error message" -[#6]: https://github.com/jprichardson/node-jsonfile/pull/6 "Use graceful-fs when possible" -[#5]: https://github.com/jprichardson/node-jsonfile/pull/5 "Jsonfile doesn't behave nicely when used inside a test suite." -[#4]: https://github.com/jprichardson/node-jsonfile/pull/4 "Added options parameter to writeFile and writeFileSync" -[#3]: https://github.com/jprichardson/node-jsonfile/issues/3 "test2" -[#2]: https://github.com/jprichardson/node-jsonfile/issues/2 "homepage field must be a string url. Deleted." 
-[#1]: https://github.com/jprichardson/node-jsonfile/pull/1 "adding an `.npmignore` file" diff --git a/node_modules/jsonfile/LICENSE b/node_modules/jsonfile/LICENSE deleted file mode 100644 index cb7e807..0000000 --- a/node_modules/jsonfile/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2012-2015, JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/jsonfile/README.md b/node_modules/jsonfile/README.md deleted file mode 100644 index 910cde0..0000000 --- a/node_modules/jsonfile/README.md +++ /dev/null @@ -1,230 +0,0 @@ -Node.js - jsonfile -================ - -Easily read/write JSON files in Node.js. _Note: this module cannot be used in the browser._ - -[![npm Package](https://img.shields.io/npm/v/jsonfile.svg?style=flat-square)](https://www.npmjs.org/package/jsonfile) -[![build status](https://secure.travis-ci.org/jprichardson/node-jsonfile.svg)](http://travis-ci.org/jprichardson/node-jsonfile) -[![windows Build status](https://img.shields.io/appveyor/ci/jprichardson/node-jsonfile/master.svg?label=windows%20build)](https://ci.appveyor.com/project/jprichardson/node-jsonfile/branch/master) - -Standard JavaScript - -Why? ----- - -Writing `JSON.stringify()` and then `fs.writeFile()` and `JSON.parse()` with `fs.readFile()` enclosed in `try/catch` blocks became annoying. - - - -Installation ------------- - - npm install --save jsonfile - - - -API ---- - -* [`readFile(filename, [options], callback)`](#readfilefilename-options-callback) -* [`readFileSync(filename, [options])`](#readfilesyncfilename-options) -* [`writeFile(filename, obj, [options], callback)`](#writefilefilename-obj-options-callback) -* [`writeFileSync(filename, obj, [options])`](#writefilesyncfilename-obj-options) - ----- - -### readFile(filename, [options], callback) - -`options` (`object`, default `undefined`): Pass in any [`fs.readFile`](https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse). - - `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback. - If `false`, returns `null` for the object. - - -```js -const jsonfile = require('jsonfile') -const file = '/tmp/data.json' -jsonfile.readFile(file, function (err, obj) { - if (err) console.error(err) - console.dir(obj) -}) -``` - -You can also use this method with promises. 
The `readFile` method will return a promise if you do not pass a callback function. - -```js -const jsonfile = require('jsonfile') -const file = '/tmp/data.json' -jsonfile.readFile(file) - .then(obj => console.dir(obj)) - .catch(error => console.error(error)) -``` - ----- - -### readFileSync(filename, [options]) - -`options` (`object`, default `undefined`): Pass in any [`fs.readFileSync`](https://nodejs.org/api/fs.html#fs_fs_readfilesync_path_options) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse). -- `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object. - -```js -const jsonfile = require('jsonfile') -const file = '/tmp/data.json' - -console.dir(jsonfile.readFileSync(file)) -``` - ----- - -### writeFile(filename, obj, [options], callback) - -`options`: Pass in any [`fs.writeFile`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end. - - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFile(file, obj, function (err) { - if (err) console.error(err) -}) -``` -Or use with promises as follows: - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFile(file, obj) - .then(res => { - console.log('Write complete') - }) - .catch(error => console.error(error)) -``` - - -**formatting with spaces:** - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) { - if (err) console.error(err) -}) -``` - -**overriding EOL:** - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFile(file, obj, { spaces: 2, EOL: '\r\n' }, function (err) { - if (err) console.error(err) -}) -``` - - -**disabling the EOL at the end of file:** - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFile(file, obj, { spaces: 2, finalEOL: false }, function (err) { - if (err) console.log(err) -}) -``` - -**appending to an existing JSON file:** - -You can use `fs.writeFile` option `{ flag: 'a' }` to achieve this. - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/mayAlreadyExistedData.json' -const obj = { name: 'JP' } - -jsonfile.writeFile(file, obj, { flag: 'a' }, function (err) { - if (err) console.error(err) -}) -``` - ----- - -### writeFileSync(filename, obj, [options]) - -`options`: Pass in any [`fs.writeFileSync`](https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end. 
- -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFileSync(file, obj) -``` - -**formatting with spaces:** - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFileSync(file, obj, { spaces: 2 }) -``` - -**overriding EOL:** - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFileSync(file, obj, { spaces: 2, EOL: '\r\n' }) -``` - -**disabling the EOL at the end of file:** - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/data.json' -const obj = { name: 'JP' } - -jsonfile.writeFileSync(file, obj, { spaces: 2, finalEOL: false }) -``` - -**appending to an existing JSON file:** - -You can use `fs.writeFileSync` option `{ flag: 'a' }` to achieve this. - -```js -const jsonfile = require('jsonfile') - -const file = '/tmp/mayAlreadyExistedData.json' -const obj = { name: 'JP' } - -jsonfile.writeFileSync(file, obj, { flag: 'a' }) -``` - -License -------- - -(MIT License) - -Copyright 2012-2016, JP Richardson diff --git a/node_modules/jsonfile/index.js b/node_modules/jsonfile/index.js deleted file mode 100644 index 0582868..0000000 --- a/node_modules/jsonfile/index.js +++ /dev/null @@ -1,88 +0,0 @@ -let _fs -try { - _fs = require('graceful-fs') -} catch (_) { - _fs = require('fs') -} -const universalify = require('universalify') -const { stringify, stripBom } = require('./utils') - -async function _readFile (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? options.throws : true - - let data = await universalify.fromCallback(fs.readFile)(file, options) - - data = stripBom(data) - - let obj - try { - obj = JSON.parse(data, options ? options.reviver : null) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } - - return obj -} - -const readFile = universalify.fromPromise(_readFile) - -function readFileSync (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? 
options.throws : true - - try { - let content = fs.readFileSync(file, options) - content = stripBom(content) - return JSON.parse(content, options.reviver) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } -} - -async function _writeFile (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - - await universalify.fromCallback(fs.writeFile)(file, str, options) -} - -const writeFile = universalify.fromPromise(_writeFile) - -function writeFileSync (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) - // not sure if fs.writeFileSync returns anything, but just in case - return fs.writeFileSync(file, str, options) -} - -const jsonfile = { - readFile, - readFileSync, - writeFile, - writeFileSync -} - -module.exports = jsonfile diff --git a/node_modules/jsonfile/package.json b/node_modules/jsonfile/package.json deleted file mode 100644 index 4d01eb1..0000000 --- a/node_modules/jsonfile/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "jsonfile", - "version": "6.1.0", - "description": "Easily read/write JSON files.", - "repository": { - "type": "git", - "url": "git@github.com:jprichardson/node-jsonfile.git" - }, - "keywords": [ - "read", - "write", - "file", - "json", - "fs", - "fs-extra" - ], - "author": "JP Richardson ", - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - }, - "devDependencies": { - "mocha": "^8.2.0", - "rimraf": "^2.4.0", - "standard": "^16.0.1" - }, - "main": "index.js", - "files": [ - "index.js", - "utils.js" - ], - "scripts": { - "lint": "standard", - "test": "npm run lint && npm run unit", - "unit": "mocha" - } -} diff --git a/node_modules/jsonfile/utils.js b/node_modules/jsonfile/utils.js deleted file mode 100644 index b5ff48e..0000000 --- a/node_modules/jsonfile/utils.js +++ /dev/null @@ -1,14 +0,0 @@ -function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) { - const EOF = finalEOL ? EOL : '' - const str = JSON.stringify(obj, replacer, spaces) - - return str.replace(/\n/g, EOL) + EOF -} - -function stripBom (content) { - // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified - if (Buffer.isBuffer(content)) content = content.toString('utf8') - return content.replace(/^\uFEFF/, '') -} - -module.exports = { stringify, stripBom } diff --git a/node_modules/lilconfig/LICENSE b/node_modules/lilconfig/LICENSE deleted file mode 100644 index fd866f4..0000000 --- a/node_modules/lilconfig/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Anton Kastritskiy - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/lilconfig/package.json b/node_modules/lilconfig/package.json deleted file mode 100644 index b64e55c..0000000 --- a/node_modules/lilconfig/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "lilconfig", - "version": "3.1.3", - "description": "A zero-dependency alternative to cosmiconfig", - "main": "src/index.js", - "types": "src/index.d.ts", - "scripts": { - "test": "NODE_OPTIONS=--experimental-vm-modules ./node_modules/.bin/jest --coverage", - "lint": "biome ci ./src", - "types": "tsc" - }, - "keywords": [ - "cosmiconfig", - "config", - "configuration", - "search" - ], - "files": [ - "src/index.*" - ], - "repository": { - "type": "git", - "url": "https://github.com/antonk52/lilconfig" - }, - "bugs": "https://github.com/antonk52/lilconfig/issues", - "author": "antonk52", - "license": "MIT", - "devDependencies": { - "@biomejs/biome": "^1.6.0", - "@types/jest": "^29.5.12", - "@types/node": "^14.18.63", - "@types/webpack-env": "^1.18.5", - "cosmiconfig": "^8.3.6", - "jest": "^29.7.0", - "typescript": "^5.3.3", - "uvu": "^0.5.6" - }, - "funding": "https://github.com/sponsors/antonk52", - "engines": { - "node": ">=14" - } -} diff --git a/node_modules/lilconfig/readme.md b/node_modules/lilconfig/readme.md deleted file mode 100644 index 99c4262..0000000 --- a/node_modules/lilconfig/readme.md +++ /dev/null @@ -1,98 +0,0 @@ -# Lilconfig ⚙️ -[![npm version](https://badge.fury.io/js/lilconfig.svg)](https://badge.fury.io/js/lilconfig) -[![install size](https://packagephobia.now.sh/badge?p=lilconfig)](https://packagephobia.now.sh/result?p=lilconfig) -[![Coverage Status](https://coveralls.io/repos/github/antonk52/lilconfig/badge.svg)](https://coveralls.io/github/antonk52/lilconfig) - -A zero-dependency alternative to [cosmiconfig](https://www.npmjs.com/package/cosmiconfig) with the same API. - -## Installation - -```sh -npm install lilconfig -``` - -## Usage - -```js -import {lilconfig, lilconfigSync} from 'lilconfig'; - -// all keys are optional -const options = { - stopDir: '/Users/you/some/dir', - searchPlaces: ['package.json', 'myapp.conf.js'], - ignoreEmptySearchPlaces: false -} - -lilconfig( - 'myapp', - options // optional -).search() // Promise - -lilconfigSync( - 'myapp', - options // optional -).load(pathToConfig) // LilconfigResult - -/** - * LilconfigResult - * { - * config: any; // your config - * filepath: string; - * } - */ -``` - -## ESM - -ESM configs can be loaded with **async API only**. Specifically `js` files in projects with `"type": "module"` in `package.json` or `mjs` files. - -## Difference to `cosmiconfig` -Lilconfig does not intend to be 100% compatible with `cosmiconfig` but tries to mimic it where possible. The key difference is **no** support for yaml files out of the box(`lilconfig` attempts to parse files with no extension as JSON instead of YAML). You can still add the support for YAML files by providing a loader, see an [example](#yaml-loader) below. - -### Options difference between the two. 
- -|cosmiconfig option | lilconfig | -|------------------------|-----------| -|cache | ✅ | -|loaders | ✅ | -|ignoreEmptySearchPlaces | ✅ | -|packageProp | ✅ | -|searchPlaces | ✅ | -|stopDir | ✅ | -|transform | ✅ | - -## Loaders examples - -### Yaml loader - -If you need the YAML support you can provide your own loader - -```js -import {lilconfig} from 'lilconfig'; -import yaml from 'yaml'; - -function loadYaml(filepath, content) { - return yaml.parse(content); -} - -const options = { - loaders: { - '.yaml': loadYaml, - '.yml': loadYaml, - // loader for files with no extension - noExt: loadYaml - } -}; - -lilconfig('myapp', options) - .search() - .then(result => { - result // {config, filepath} - }); -``` - -## Version correlation - -- lilconig v1 → cosmiconfig v6 -- lilconig v2 → cosmiconfig v7 -- lilconig v3 → cosmiconfig v8 diff --git a/node_modules/lilconfig/src/index.d.ts b/node_modules/lilconfig/src/index.d.ts deleted file mode 100644 index fa1146b..0000000 --- a/node_modules/lilconfig/src/index.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -export type LilconfigResult = null | { - filepath: string; - config: any; - isEmpty?: boolean; -}; -interface OptionsBase { - cache?: boolean; - stopDir?: string; - searchPlaces?: string[]; - ignoreEmptySearchPlaces?: boolean; - packageProp?: string | string[]; -} -export type Transform = - | TransformSync - | ((result: LilconfigResult) => Promise); -export type TransformSync = (result: LilconfigResult) => LilconfigResult; -type LoaderResult = any; -export type LoaderSync = (filepath: string, content: string) => LoaderResult; -export type Loader = - | LoaderSync - | ((filepath: string, content: string) => Promise); -export type Loaders = Record; -export type LoadersSync = Record; -export interface Options extends OptionsBase { - loaders?: Loaders; - transform?: Transform; -} -export interface OptionsSync extends OptionsBase { - loaders?: LoadersSync; - transform?: TransformSync; -} -export declare const defaultLoadersSync: LoadersSync; -export declare const defaultLoaders: Loaders; -type ClearCaches = { - clearLoadCache: () => void; - clearSearchCache: () => void; - clearCaches: () => void; -}; -type AsyncSearcher = { - search(searchFrom?: string): Promise; - load(filepath: string): Promise; -} & ClearCaches; -export declare function lilconfig( - name: string, - options?: Partial, -): AsyncSearcher; -type SyncSearcher = { - search(searchFrom?: string): LilconfigResult; - load(filepath: string): LilconfigResult; -} & ClearCaches; -export declare function lilconfigSync( - name: string, - options?: OptionsSync, -): SyncSearcher; diff --git a/node_modules/lilconfig/src/index.js b/node_modules/lilconfig/src/index.js deleted file mode 100644 index af7bf47..0000000 --- a/node_modules/lilconfig/src/index.js +++ /dev/null @@ -1,460 +0,0 @@ -// @ts-check -const path = require('path'); -const fs = require('fs'); -const os = require('os'); -const url = require('url'); - -const fsReadFileAsync = fs.promises.readFile; - -/** @type {(name: string, sync: boolean) => string[]} */ -function getDefaultSearchPlaces(name, sync) { - return [ - 'package.json', - `.${name}rc.json`, - `.${name}rc.js`, - `.${name}rc.cjs`, - ...(sync ? [] : [`.${name}rc.mjs`]), - `.config/${name}rc`, - `.config/${name}rc.json`, - `.config/${name}rc.js`, - `.config/${name}rc.cjs`, - ...(sync ? [] : [`.config/${name}rc.mjs`]), - `${name}.config.js`, - `${name}.config.cjs`, - ...(sync ? 
[] : [`${name}.config.mjs`]), - ]; -} - -/** - * @type {(p: string) => string} - * - * see #17 - * On *nix, if cwd is not under homedir, - * the last path will be '', ('/build' -> '') - * but it should be '/' actually. - * And on Windows, this will never happen. ('C:\build' -> 'C:') - */ -function parentDir(p) { - return path.dirname(p) || path.sep; -} - -/** @type {import('./index').LoaderSync} */ -const jsonLoader = (_, content) => JSON.parse(content); -// Use plain require in webpack context for dynamic import -const requireFunc = - typeof __webpack_require__ === 'function' ? __non_webpack_require__ : require; -/** @type {import('./index').LoadersSync} */ -const defaultLoadersSync = Object.freeze({ - '.js': requireFunc, - '.json': requireFunc, - '.cjs': requireFunc, - noExt: jsonLoader, -}); -module.exports.defaultLoadersSync = defaultLoadersSync; - -/** @type {import('./index').Loader} */ -const dynamicImport = async id => { - try { - const fileUrl = url.pathToFileURL(id).href; - const mod = await import(/* webpackIgnore: true */ fileUrl); - - return mod.default; - } catch (e) { - try { - return requireFunc(id); - } catch (/** @type {any} */ requireE) { - if ( - requireE.code === 'ERR_REQUIRE_ESM' || - (requireE instanceof SyntaxError && - requireE - .toString() - .includes('Cannot use import statement outside a module')) - ) { - throw e; - } - throw requireE; - } - } -}; - -/** @type {import('./index').Loaders} */ -const defaultLoaders = Object.freeze({ - '.js': dynamicImport, - '.mjs': dynamicImport, - '.cjs': dynamicImport, - '.json': jsonLoader, - noExt: jsonLoader, -}); -module.exports.defaultLoaders = defaultLoaders; - -/** - * @param {string} name - * @param {import('./index').Options | import('./index').OptionsSync} options - * @param {boolean} sync - * @returns {Required} - */ -function getOptions(name, options, sync) { - /** @type {Required} */ - const conf = { - stopDir: os.homedir(), - searchPlaces: getDefaultSearchPlaces(name, sync), - ignoreEmptySearchPlaces: true, - cache: true, - transform: x => x, - packageProp: [name], - ...options, - loaders: { - ...(sync ? defaultLoadersSync : defaultLoaders), - ...options.loaders, - }, - }; - conf.searchPlaces.forEach(place => { - const key = path.extname(place) || 'noExt'; - const loader = conf.loaders[key]; - if (!loader) { - throw new Error(`Missing loader for extension "${place}"`); - } - - if (typeof loader !== 'function') { - throw new Error( - `Loader for extension "${place}" is not a function: Received ${typeof loader}.`, - ); - } - }); - - return conf; -} - -/** @type {(props: string | string[], obj: Record) => unknown} */ -function getPackageProp(props, obj) { - if (typeof props === 'string' && props in obj) return obj[props]; - return ( - (Array.isArray(props) ? props : props.split('.')).reduce( - (acc, prop) => (acc === undefined ? 
acc : acc[prop]), - obj, - ) || null - ); -} - -/** @param {string} filepath */ -function validateFilePath(filepath) { - if (!filepath) throw new Error('load must pass a non-empty string'); -} - -/** @type {(loader: import('./index').Loader, ext: string) => void} */ -function validateLoader(loader, ext) { - if (!loader) throw new Error(`No loader specified for extension "${ext}"`); - if (typeof loader !== 'function') throw new Error('loader is not a function'); -} - -/** @type {(enableCache: boolean) => (c: Map, filepath: string, res: T) => T} */ -const makeEmplace = enableCache => (c, filepath, res) => { - if (enableCache) c.set(filepath, res); - return res; -}; - -/** @type {import('./index').lilconfig} */ -module.exports.lilconfig = function lilconfig(name, options) { - const { - ignoreEmptySearchPlaces, - loaders, - packageProp, - searchPlaces, - stopDir, - transform, - cache, - } = getOptions(name, options ?? {}, false); - const searchCache = new Map(); - const loadCache = new Map(); - const emplace = makeEmplace(cache); - - return { - async search(searchFrom = process.cwd()) { - /** @type {import('./index').LilconfigResult} */ - const result = { - config: null, - filepath: '', - }; - - /** @type {Set} */ - const visited = new Set(); - let dir = searchFrom; - dirLoop: while (true) { - if (cache) { - const r = searchCache.get(dir); - if (r !== undefined) { - for (const p of visited) searchCache.set(p, r); - return r; - } - visited.add(dir); - } - - for (const searchPlace of searchPlaces) { - const filepath = path.join(dir, searchPlace); - try { - await fs.promises.access(filepath); - } catch { - continue; - } - const content = String(await fsReadFileAsync(filepath)); - const loaderKey = path.extname(searchPlace) || 'noExt'; - const loader = loaders[loaderKey]; - - // handle package.json - if (searchPlace === 'package.json') { - const pkg = await loader(filepath, content); - const maybeConfig = getPackageProp(packageProp, pkg); - if (maybeConfig != null) { - result.config = maybeConfig; - result.filepath = filepath; - break dirLoop; - } - - continue; - } - - // handle other type of configs - const isEmpty = content.trim() === ''; - if (isEmpty && ignoreEmptySearchPlaces) continue; - - if (isEmpty) { - result.isEmpty = true; - result.config = undefined; - } else { - validateLoader(loader, loaderKey); - result.config = await loader(filepath, content); - } - result.filepath = filepath; - break dirLoop; - } - if (dir === stopDir || dir === parentDir(dir)) break dirLoop; - dir = parentDir(dir); - } - - const transformed = - // not found - result.filepath === '' && result.config === null - ? 
transform(null) - : transform(result); - - if (cache) { - for (const p of visited) searchCache.set(p, transformed); - } - - return transformed; - }, - async load(filepath) { - validateFilePath(filepath); - const absPath = path.resolve(process.cwd(), filepath); - if (cache && loadCache.has(absPath)) { - return loadCache.get(absPath); - } - const {base, ext} = path.parse(absPath); - const loaderKey = ext || 'noExt'; - const loader = loaders[loaderKey]; - validateLoader(loader, loaderKey); - const content = String(await fsReadFileAsync(absPath)); - - if (base === 'package.json') { - const pkg = await loader(absPath, content); - return emplace( - loadCache, - absPath, - transform({ - config: getPackageProp(packageProp, pkg), - filepath: absPath, - }), - ); - } - /** @type {import('./index').LilconfigResult} */ - const result = { - config: null, - filepath: absPath, - }; - // handle other type of configs - const isEmpty = content.trim() === ''; - if (isEmpty && ignoreEmptySearchPlaces) - return emplace( - loadCache, - absPath, - transform({ - config: undefined, - filepath: absPath, - isEmpty: true, - }), - ); - - // cosmiconfig returns undefined for empty files - result.config = isEmpty ? undefined : await loader(absPath, content); - - return emplace( - loadCache, - absPath, - transform(isEmpty ? {...result, isEmpty, config: undefined} : result), - ); - }, - clearLoadCache() { - if (cache) loadCache.clear(); - }, - clearSearchCache() { - if (cache) searchCache.clear(); - }, - clearCaches() { - if (cache) { - loadCache.clear(); - searchCache.clear(); - } - }, - }; -}; - -/** @type {import('./index').lilconfigSync} */ -module.exports.lilconfigSync = function lilconfigSync(name, options) { - const { - ignoreEmptySearchPlaces, - loaders, - packageProp, - searchPlaces, - stopDir, - transform, - cache, - } = getOptions(name, options ?? {}, true); - const searchCache = new Map(); - const loadCache = new Map(); - const emplace = makeEmplace(cache); - - return { - search(searchFrom = process.cwd()) { - /** @type {import('./index').LilconfigResult} */ - const result = { - config: null, - filepath: '', - }; - - /** @type {Set} */ - const visited = new Set(); - let dir = searchFrom; - dirLoop: while (true) { - if (cache) { - const r = searchCache.get(dir); - if (r !== undefined) { - for (const p of visited) searchCache.set(p, r); - return r; - } - visited.add(dir); - } - - for (const searchPlace of searchPlaces) { - const filepath = path.join(dir, searchPlace); - try { - fs.accessSync(filepath); - } catch { - continue; - } - const loaderKey = path.extname(searchPlace) || 'noExt'; - const loader = loaders[loaderKey]; - const content = String(fs.readFileSync(filepath)); - - // handle package.json - if (searchPlace === 'package.json') { - const pkg = loader(filepath, content); - const maybeConfig = getPackageProp(packageProp, pkg); - if (maybeConfig != null) { - result.config = maybeConfig; - result.filepath = filepath; - break dirLoop; - } - - continue; - } - - // handle other type of configs - const isEmpty = content.trim() === ''; - if (isEmpty && ignoreEmptySearchPlaces) continue; - - if (isEmpty) { - result.isEmpty = true; - result.config = undefined; - } else { - validateLoader(loader, loaderKey); - result.config = loader(filepath, content); - } - result.filepath = filepath; - break dirLoop; - } - if (dir === stopDir || dir === parentDir(dir)) break dirLoop; - dir = parentDir(dir); - } - - const transformed = - // not found - result.filepath === '' && result.config === null - ? 
transform(null) - : transform(result); - - if (cache) { - for (const p of visited) searchCache.set(p, transformed); - } - - return transformed; - }, - load(filepath) { - validateFilePath(filepath); - const absPath = path.resolve(process.cwd(), filepath); - if (cache && loadCache.has(absPath)) { - return loadCache.get(absPath); - } - const {base, ext} = path.parse(absPath); - const loaderKey = ext || 'noExt'; - const loader = loaders[loaderKey]; - validateLoader(loader, loaderKey); - - const content = String(fs.readFileSync(absPath)); - - if (base === 'package.json') { - const pkg = loader(absPath, content); - return transform({ - config: getPackageProp(packageProp, pkg), - filepath: absPath, - }); - } - const result = { - config: null, - filepath: absPath, - }; - // handle other type of configs - const isEmpty = content.trim() === ''; - if (isEmpty && ignoreEmptySearchPlaces) - return emplace( - loadCache, - absPath, - transform({ - filepath: absPath, - config: undefined, - isEmpty: true, - }), - ); - - // cosmiconfig returns undefined for empty files - result.config = isEmpty ? undefined : loader(absPath, content); - - return emplace( - loadCache, - absPath, - transform(isEmpty ? {...result, isEmpty, config: undefined} : result), - ); - }, - clearLoadCache() { - if (cache) loadCache.clear(); - }, - clearSearchCache() { - if (cache) searchCache.clear(); - }, - clearCaches() { - if (cache) { - loadCache.clear(); - searchCache.clear(); - } - }, - }; -}; diff --git a/node_modules/merge2/LICENSE b/node_modules/merge2/LICENSE deleted file mode 100644 index 31dd9c7..0000000 --- a/node_modules/merge2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2020 Teambition - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/merge2/README.md b/node_modules/merge2/README.md deleted file mode 100644 index 27f8eb9..0000000 --- a/node_modules/merge2/README.md +++ /dev/null @@ -1,144 +0,0 @@ -# merge2 - -Merge multiple streams into one stream in sequence or parallel. 
- -[![NPM version][npm-image]][npm-url] -[![Build Status][travis-image]][travis-url] -[![Downloads][downloads-image]][downloads-url] - -## Install - -Install with [npm](https://npmjs.org/package/merge2) - -```sh -npm install merge2 -``` - -## Usage - -```js -const gulp = require('gulp') -const merge2 = require('merge2') -const concat = require('gulp-concat') -const minifyHtml = require('gulp-minify-html') -const ngtemplate = require('gulp-ngtemplate') - -gulp.task('app-js', function () { - return merge2( - gulp.src('static/src/tpl/*.html') - .pipe(minifyHtml({empty: true})) - .pipe(ngtemplate({ - module: 'genTemplates', - standalone: true - }) - ), gulp.src([ - 'static/src/js/app.js', - 'static/src/js/locale_zh-cn.js', - 'static/src/js/router.js', - 'static/src/js/tools.js', - 'static/src/js/services.js', - 'static/src/js/filters.js', - 'static/src/js/directives.js', - 'static/src/js/controllers.js' - ]) - ) - .pipe(concat('app.js')) - .pipe(gulp.dest('static/dist/js/')) -}) -``` - -```js -const stream = merge2([stream1, stream2], stream3, {end: false}) -//... -stream.add(stream4, stream5) -//.. -stream.end() -``` - -```js -// equal to merge2([stream1, stream2], stream3) -const stream = merge2() -stream.add([stream1, stream2]) -stream.add(stream3) -``` - -```js -// merge order: -// 1. merge `stream1`; -// 2. merge `stream2` and `stream3` in parallel after `stream1` merged; -// 3. merge 'stream4' after `stream2` and `stream3` merged; -const stream = merge2(stream1, [stream2, stream3], stream4) - -// merge order: -// 1. merge `stream5` and `stream6` in parallel after `stream4` merged; -// 2. merge 'stream7' after `stream5` and `stream6` merged; -stream.add([stream5, stream6], stream7) -``` - -```js -// nest merge -// equal to merge2(stream1, stream2, stream6, stream3, [stream4, stream5]); -const streamA = merge2(stream1, stream2) -const streamB = merge2(stream3, [stream4, stream5]) -const stream = merge2(streamA, streamB) -streamA.add(stream6) -``` - -## API - -```js -const merge2 = require('merge2') -``` - -### merge2() - -### merge2(options) - -### merge2(stream1, stream2, ..., streamN) - -### merge2(stream1, stream2, ..., streamN, options) - -### merge2(stream1, [stream2, stream3, ...], streamN, options) - -return a duplex stream (mergedStream). streams in array will be merged in parallel. - -### mergedStream.add(stream) - -### mergedStream.add(stream1, [stream2, stream3, ...], ...) - -return the mergedStream. - -### mergedStream.on('queueDrain', function() {}) - -It will emit 'queueDrain' when all streams merged. If you set `end === false` in options, this event give you a notice that should add more streams to merge or end the mergedStream. - -#### stream - -*option* -Type: `Readable` or `Duplex` or `Transform` stream. - -#### options - -*option* -Type: `Object`. - -* **end** - `Boolean` - if `end === false` then mergedStream will not be auto ended, you should end by yourself. **Default:** `undefined` - -* **pipeError** - `Boolean` - if `pipeError === true` then mergedStream will emit `error` event from source streams. **Default:** `undefined` - -* **objectMode** - `Boolean` . **Default:** `true` - -`objectMode` and other options(`highWaterMark`, `defaultEncoding` ...) is same as Node.js `Stream`. 
- -## License - -MIT © [Teambition](https://www.teambition.com) - -[npm-url]: https://npmjs.org/package/merge2 -[npm-image]: http://img.shields.io/npm/v/merge2.svg - -[travis-url]: https://travis-ci.org/teambition/merge2 -[travis-image]: http://img.shields.io/travis/teambition/merge2.svg - -[downloads-url]: https://npmjs.org/package/merge2 -[downloads-image]: http://img.shields.io/npm/dm/merge2.svg?style=flat-square diff --git a/node_modules/merge2/index.js b/node_modules/merge2/index.js deleted file mode 100644 index 78a61ed..0000000 --- a/node_modules/merge2/index.js +++ /dev/null @@ -1,144 +0,0 @@ -'use strict' -/* - * merge2 - * https://github.com/teambition/merge2 - * - * Copyright (c) 2014-2020 Teambition - * Licensed under the MIT license. - */ -const Stream = require('stream') -const PassThrough = Stream.PassThrough -const slice = Array.prototype.slice - -module.exports = merge2 - -function merge2 () { - const streamsQueue = [] - const args = slice.call(arguments) - let merging = false - let options = args[args.length - 1] - - if (options && !Array.isArray(options) && options.pipe == null) { - args.pop() - } else { - options = {} - } - - const doEnd = options.end !== false - const doPipeError = options.pipeError === true - if (options.objectMode == null) { - options.objectMode = true - } - if (options.highWaterMark == null) { - options.highWaterMark = 64 * 1024 - } - const mergedStream = PassThrough(options) - - function addStream () { - for (let i = 0, len = arguments.length; i < len; i++) { - streamsQueue.push(pauseStreams(arguments[i], options)) - } - mergeStream() - return this - } - - function mergeStream () { - if (merging) { - return - } - merging = true - - let streams = streamsQueue.shift() - if (!streams) { - process.nextTick(endStream) - return - } - if (!Array.isArray(streams)) { - streams = [streams] - } - - let pipesCount = streams.length + 1 - - function next () { - if (--pipesCount > 0) { - return - } - merging = false - mergeStream() - } - - function pipe (stream) { - function onend () { - stream.removeListener('merge2UnpipeEnd', onend) - stream.removeListener('end', onend) - if (doPipeError) { - stream.removeListener('error', onerror) - } - next() - } - function onerror (err) { - mergedStream.emit('error', err) - } - // skip ended stream - if (stream._readableState.endEmitted) { - return next() - } - - stream.on('merge2UnpipeEnd', onend) - stream.on('end', onend) - - if (doPipeError) { - stream.on('error', onerror) - } - - stream.pipe(mergedStream, { end: false }) - // compatible for old stream - stream.resume() - } - - for (let i = 0; i < streams.length; i++) { - pipe(streams[i]) - } - - next() - } - - function endStream () { - merging = false - // emit 'queueDrain' when all streams merged. - mergedStream.emit('queueDrain') - if (doEnd) { - mergedStream.end() - } - } - - mergedStream.setMaxListeners(0) - mergedStream.add = addStream - mergedStream.on('unpipe', function (stream) { - stream.emit('merge2UnpipeEnd') - }) - - if (args.length) { - addStream.apply(null, args) - } - return mergedStream -} - -// check and pause streams for pipe. 
-function pauseStreams (streams, options) { - if (!Array.isArray(streams)) { - // Backwards-compat with old-style streams - if (!streams._readableState && streams.pipe) { - streams = streams.pipe(PassThrough(options)) - } - if (!streams._readableState || !streams.pause || !streams.pipe) { - throw new Error('Only readable stream can be merged.') - } - streams.pause() - } else { - for (let i = 0, len = streams.length; i < len; i++) { - streams[i] = pauseStreams(streams[i], options) - } - } - return streams -} diff --git a/node_modules/merge2/package.json b/node_modules/merge2/package.json deleted file mode 100644 index 7777307..0000000 --- a/node_modules/merge2/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "merge2", - "description": "Merge multiple streams into one stream in sequence or parallel.", - "authors": [ - "Yan Qing " - ], - "license": "MIT", - "version": "1.4.1", - "main": "./index.js", - "repository": { - "type": "git", - "url": "git@github.com:teambition/merge2.git" - }, - "homepage": "https://github.com/teambition/merge2", - "keywords": [ - "merge2", - "multiple", - "sequence", - "parallel", - "merge", - "stream", - "merge stream", - "sync" - ], - "engines": { - "node": ">= 8" - }, - "dependencies": {}, - "devDependencies": { - "standard": "^14.3.4", - "through2": "^3.0.1", - "thunks": "^4.9.6", - "tman": "^1.10.0", - "to-through": "^2.0.0" - }, - "scripts": { - "test": "standard && tman" - }, - "files": [ - "README.md", - "index.js" - ] -} diff --git a/node_modules/micromatch/LICENSE b/node_modules/micromatch/LICENSE deleted file mode 100755 index 9af4a67..0000000 --- a/node_modules/micromatch/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/micromatch/README.md b/node_modules/micromatch/README.md deleted file mode 100644 index d72a059..0000000 --- a/node_modules/micromatch/README.md +++ /dev/null @@ -1,1024 +0,0 @@ -# micromatch [![NPM version](https://img.shields.io/npm/v/micromatch.svg?style=flat)](https://www.npmjs.com/package/micromatch) [![NPM monthly downloads](https://img.shields.io/npm/dm/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![NPM total downloads](https://img.shields.io/npm/dt/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![Tests](https://github.com/micromatch/micromatch/actions/workflows/test.yml/badge.svg)](https://github.com/micromatch/micromatch/actions/workflows/test.yml) - -> Glob matching for javascript/node.js. A replacement and faster alternative to minimatch and multimatch. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Table of Contents - -
-Details - - * [Install](#install) -- [Sponsors](#sponsors) - * [Gold Sponsors](#gold-sponsors) - * [Quickstart](#quickstart) - * [Why use micromatch?](#why-use-micromatch) - + [Matching features](#matching-features) - * [Switching to micromatch](#switching-to-micromatch) - + [From minimatch](#from-minimatch) - + [From multimatch](#from-multimatch) - * [API](#api) - * [Options](#options) - * [Options Examples](#options-examples) - + [options.basename](#optionsbasename) - + [options.bash](#optionsbash) - + [options.expandRange](#optionsexpandrange) - + [options.format](#optionsformat) - + [options.ignore](#optionsignore) - + [options.matchBase](#optionsmatchbase) - + [options.noextglob](#optionsnoextglob) - + [options.nonegate](#optionsnonegate) - + [options.noglobstar](#optionsnoglobstar) - + [options.nonull](#optionsnonull) - + [options.nullglob](#optionsnullglob) - + [options.onIgnore](#optionsonignore) - + [options.onMatch](#optionsonmatch) - + [options.onResult](#optionsonresult) - + [options.posixSlashes](#optionsposixslashes) - + [options.unescape](#optionsunescape) - * [Extended globbing](#extended-globbing) - + [Extglobs](#extglobs) - + [Braces](#braces) - + [Regex character classes](#regex-character-classes) - + [Regex groups](#regex-groups) - + [POSIX bracket expressions](#posix-bracket-expressions) - * [Notes](#notes) - + [Bash 4.3 parity](#bash-43-parity) - + [Backslashes](#backslashes) - * [Benchmarks](#benchmarks) - + [Running benchmarks](#running-benchmarks) - + [Latest results](#latest-results) - * [Contributing](#contributing) - * [About](#about) - -
- -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save micromatch -``` - -
- -# Sponsors - -[Become a Sponsor](https://github.com/sponsors/jonschlinkert) to add your logo to this README, or any of [my other projects](https://github.com/jonschlinkert?tab=repositories&q=&type=&language=&sort=stargazers) - -
- -## Quickstart - -```js -const micromatch = require('micromatch'); -// micromatch(list, patterns[, options]); -``` - -The [main export](#micromatch) takes a list of strings and one or more glob patterns: - -```js -console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['f*', 'b*'])) //=> ['foo', 'bar', 'baz'] -console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['*', '!b*'])) //=> ['foo', 'qux'] -``` - -Use [.isMatch()](#ismatch) to for boolean matching: - -```js -console.log(micromatch.isMatch('foo', 'f*')) //=> true -console.log(micromatch.isMatch('foo', ['b*', 'f*'])) //=> true -``` - -[Switching](#switching-to-micromatch) from minimatch and multimatch is easy! - -
- -## Why use micromatch? - -> micromatch is a [replacement](#switching-to-micromatch) for minimatch and multimatch - -* Supports all of the same matching features as [minimatch](https://github.com/isaacs/minimatch) and [multimatch](https://github.com/sindresorhus/multimatch) -* More complete support for the Bash 4.3 specification than minimatch and multimatch. Micromatch passes _all of the spec tests_ from bash, including some that bash still fails. -* **Fast & Performant** - Loads in about 5ms and performs [fast matches](#benchmarks). -* **Glob matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories -* **[Advanced globbing](#extended-globbing)** - Supports [extglobs](#extglobs), [braces](#braces-1), and [POSIX brackets](#posix-bracket-expressions), and support for escaping special characters with `\` or quotes. -* **Accurate** - Covers more scenarios [than minimatch](https://github.com/yarnpkg/yarn/pull/3339) -* **Well tested** - More than 5,000 [test assertions](./test) -* **Windows support** - More reliable windows support than minimatch and multimatch. -* **[Safe](https://github.com/micromatch/braces#braces-is-safe)** - Micromatch is not subject to DoS with brace patterns like minimatch and multimatch. - -### Matching features - -* Support for multiple glob patterns (no need for wrappers like multimatch) -* Wildcards (`**`, `*.js`) -* Negation (`'!a/*.js'`, `'*!(b).js'`) -* [extglobs](#extglobs) (`+(x|y)`, `!(a|b)`) -* [POSIX character classes](#posix-bracket-expressions) (`[[:alpha:][:digit:]]`) -* [brace expansion](https://github.com/micromatch/braces) (`foo/{1..5}.md`, `bar/{a,b,c}.js`) -* regex character classes (`foo-[1-5].js`) -* regex logical "or" (`foo/(abc|xyz).js`) - -You can mix and match these features to create whatever patterns you need! - -## Switching to micromatch - -_(There is one notable difference between micromatch and minimatch in regards to how backslashes are handled. See [the notes about backslashes](#backslashes) for more information.)_ - -### From minimatch - -Use [micromatch.isMatch()](#ismatch) instead of `minimatch()`: - -```js -console.log(micromatch.isMatch('foo', 'b*')); //=> false -``` - -Use [micromatch.match()](#match) instead of `minimatch.match()`: - -```js -console.log(micromatch.match(['foo', 'bar'], 'b*')); //=> 'bar' -``` - -### From multimatch - -Same signature: - -```js -console.log(micromatch(['foo', 'bar', 'baz'], ['f*', '*z'])); //=> ['foo', 'baz'] -``` - -## API - -**Params** - -* `list` **{String|Array}**: List of strings to match. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) -* `returns` **{Array}**: Returns an array of matches - -**Example** - -```js -const mm = require('micromatch'); -// mm(list, patterns[, options]); - -console.log(mm(['a.js', 'a.txt'], ['*.js'])); -//=> [ 'a.js' ] -``` - -### [.matcher](index.js#L109) - -Returns a matcher function from the given glob `pattern` and `options`. The returned function takes a string to match as its only argument and returns true if the string is a match. - -**Params** - -* `pattern` **{String}**: Glob pattern -* `options` **{Object}** -* `returns` **{Function}**: Returns a matcher function. 
- -**Example** - -```js -const mm = require('micromatch'); -// mm.matcher(pattern[, options]); - -const isMatch = mm.matcher('*.!(*a)'); -console.log(isMatch('a.a')); //=> false -console.log(isMatch('a.b')); //=> true -``` - -### [.isMatch](index.js#L128) - -Returns true if **any** of the given glob `patterns` match the specified `string`. - -**Params** - -* `str` **{String}**: The string to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `[options]` **{Object}**: See available [options](#options). -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const mm = require('micromatch'); -// mm.isMatch(string, patterns[, options]); - -console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true -console.log(mm.isMatch('a.a', 'b.*')); //=> false -``` - -### [.not](index.js#L153) - -Returns a list of strings that _**do not match any**_ of the given `patterns`. - -**Params** - -* `list` **{Array}**: Array of strings to match. -* `patterns` **{String|Array}**: One or more glob pattern to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Array}**: Returns an array of strings that **do not match** the given patterns. - -**Example** - -```js -const mm = require('micromatch'); -// mm.not(list, patterns[, options]); - -console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); -//=> ['b.b', 'c.c'] -``` - -### [.contains](index.js#L193) - -Returns true if the given `string` contains the given pattern. Similar to [.isMatch](#isMatch) but the pattern can match any part of the string. - -**Params** - -* `str` **{String}**: The string to match. -* `patterns` **{String|Array}**: Glob pattern to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any of the patterns matches any part of `str`. - -**Example** - -```js -var mm = require('micromatch'); -// mm.contains(string, pattern[, options]); - -console.log(mm.contains('aa/bb/cc', '*b')); -//=> true -console.log(mm.contains('aa/bb/cc', '*d')); -//=> false -``` - -### [.matchKeys](index.js#L235) - -Filter the keys of the given object with the given `glob` pattern and `options`. Does not attempt to match nested keys. If you need this feature, use [glob-object](https://github.com/jonschlinkert/glob-object) instead. - -**Params** - -* `object` **{Object}**: The object with keys to filter. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Object}**: Returns an object with only keys that match the given patterns. - -**Example** - -```js -const mm = require('micromatch'); -// mm.matchKeys(object, patterns[, options]); - -const obj = { aa: 'a', ab: 'b', ac: 'c' }; -console.log(mm.matchKeys(obj, '*b')); -//=> { ab: 'b' } -``` - -### [.some](index.js#L264) - -Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - -**Params** - -* `list` **{String|Array}**: The string or array of strings to test. Returns as soon as the first match is found. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
-* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any `patterns` matches any of the strings in `list` - -**Example** - -```js -const mm = require('micromatch'); -// mm.some(list, patterns[, options]); - -console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); -// true -console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); -// false -``` - -### [.every](index.js#L300) - -Returns true if every string in the given `list` matches any of the given glob `patterns`. - -**Params** - -* `list` **{String|Array}**: The string or array of strings to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if all `patterns` matches all of the strings in `list` - -**Example** - -```js -const mm = require('micromatch'); -// mm.every(list, patterns[, options]); - -console.log(mm.every('foo.js', ['foo.js'])); -// true -console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); -// true -console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); -// false -console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); -// false -``` - -### [.all](index.js#L339) - -Returns true if **all** of the given `patterns` match the specified string. - -**Params** - -* `str` **{String|Array}**: The string to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const mm = require('micromatch'); -// mm.all(string, patterns[, options]); - -console.log(mm.all('foo.js', ['foo.js'])); -// true - -console.log(mm.all('foo.js', ['*.js', '!foo.js'])); -// false - -console.log(mm.all('foo.js', ['*.js', 'foo.js'])); -// true - -console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); -// true -``` - -### [.capture](index.js#L366) - -Returns an array of matches captured by `pattern` in `string, or`null` if the pattern did not match. - -**Params** - -* `glob` **{String}**: Glob pattern to use for matching. -* `input` **{String}**: String to match -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Array|null}**: Returns an array of captures if the input matches the glob pattern, otherwise `null`. - -**Example** - -```js -const mm = require('micromatch'); -// mm.capture(pattern, string[, options]); - -console.log(mm.capture('test/*.js', 'test/foo.js')); -//=> ['foo'] -console.log(mm.capture('test/*.js', 'foo/bar.css')); -//=> null -``` - -### [.makeRe](index.js#L392) - -Create a regular expression from the given glob `pattern`. - -**Params** - -* `pattern` **{String}**: A glob pattern to convert to regex. -* `options` **{Object}** -* `returns` **{RegExp}**: Returns a regex created from the given pattern. - -**Example** - -```js -const mm = require('micromatch'); -// mm.makeRe(pattern[, options]); - -console.log(mm.makeRe('*.js')); -//=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ -``` - -### [.scan](index.js#L408) - -Scan a glob pattern to separate the pattern into segments. Used by the [split](#split) method. 
- -**Params** - -* `pattern` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with - -**Example** - -```js -const mm = require('micromatch'); -const state = mm.scan(pattern[, options]); -``` - -### [.parse](index.js#L424) - -Parse a glob pattern to create the source string for a regular expression. - -**Params** - -* `glob` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with useful properties and output to be used as regex source string. - -**Example** - -```js -const mm = require('micromatch'); -const state = mm.parse(pattern[, options]); -``` - -### [.braces](index.js#L451) - -Process the given brace `pattern`. - -**Params** - -* `pattern` **{String}**: String with brace pattern to process. -* `options` **{Object}**: Any [options](#options) to change how expansion is performed. See the [braces](https://github.com/micromatch/braces) library for all available options. -* `returns` **{Array}** - -**Example** - -```js -const { braces } = require('micromatch'); -console.log(braces('foo/{a,b,c}/bar')); -//=> [ 'foo/(a|b|c)/bar' ] - -console.log(braces('foo/{a,b,c}/bar', { expand: true })); -//=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] -``` - -## Options - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | -| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | -| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | -| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | -| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | -| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | -| `dot` | `boolean` | `false` | Match dotfiles. Otherwise dotfiles are ignored unless a `.` is explicitly defined in the pattern. | -| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. This option is overridden by the `expandBrace` option. | -| `failglob` | `boolean` | `false` | Similar to the `failglob` behavior in Bash, throws an error when no matches are found. Based on the bash option of the same name. | -| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | -| `flags` | `boolean` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | -| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | -| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. 
| -| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | -| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | -| `lookbehinds` | `boolean` | `true` | Support regex positive and negative lookbehinds. Note that you must be using Node 8.1.10 or higher to enable regex lookbehinds. | -| `matchBase` | `boolean` | `false` | Alias for `basename` | -| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | -| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | -| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | -| `nocase` | `boolean` | `false` | Perform case-insensitive matching. Equivalent to the regex `i` flag. Note that this option is ignored when the `flags` option is defined. | -| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. | -| `noext` | `boolean` | `false` | Alias for `noextglob` | -| `noextglob` | `boolean` | `false` | Disable support for matching with [extglobs](#extglobs) (like `+(a\|b)`) | -| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | -| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | -| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | -| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | -| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | -| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | -| `posix` | `boolean` | `false` | Support [POSIX character classes](#posix-bracket-expressions) ("posix brackets"). | -| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | -| `prepend` | `string` | `undefined` | String to prepend to the generated regex used for matching. | -| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | -| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | -| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | -| `unescape` | `boolean` | `undefined` | Remove preceding backslashes from escaped glob characters before creating the regular expression to perform matches. | -| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatitibility. | - -## Options Examples - -### options.basename - -Allow glob patterns without slashes to match a file path based on its basename. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `matchBase`. 
- -**Type**: `Boolean` - -**Default**: `false` - -**Example** - -```js -micromatch(['a/b.js', 'a/c.md'], '*.js'); -//=> [] - -micromatch(['a/b.js', 'a/c.md'], '*.js', { basename: true }); -//=> ['a/b.js'] -``` - -### options.bash - -Enabled by default, this option enforces bash-like behavior with stars immediately following a bracket expression. Bash bracket expressions are similar to regex character classes, but unlike regex, a star following a bracket expression **does not repeat the bracketed characters**. Instead, the star is treated the same as any other star. - -**Type**: `Boolean` - -**Default**: `true` - -**Example** - -```js -const files = ['abc', 'ajz']; -console.log(micromatch(files, '[a-c]*')); -//=> ['abc', 'ajz'] - -console.log(micromatch(files, '[a-c]*', { bash: false })); -``` - -### options.expandRange - -**Type**: `function` - -**Default**: `undefined` - -Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. - -**Example** - -The following example shows how to create a glob that matches a numeric folder name between `01` and `25`, with leading zeros. - -```js -const fill = require('fill-range'); -const regex = micromatch.makeRe('foo/{01..25}/bar', { - expandRange(a, b) { - return `(${fill(a, b, { toRegex: true })})`; - } -}); - -console.log(regex) -//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ - -console.log(regex.test('foo/00/bar')) // false -console.log(regex.test('foo/01/bar')) // true -console.log(regex.test('foo/10/bar')) // true -console.log(regex.test('foo/22/bar')) // true -console.log(regex.test('foo/25/bar')) // true -console.log(regex.test('foo/26/bar')) // false -``` - -### options.format - -**Type**: `function` - -**Default**: `undefined` - -Custom function for formatting strings before they're matched. - -**Example** - -```js -// strip leading './' from strings -const format = str => str.replace(/^\.\//, ''); -const isMatch = picomatch('foo/*.js', { format }); -console.log(isMatch('./foo/bar.js')) //=> true -``` - -### options.ignore - -String or array of glob patterns to match files to ignore. - -**Type**: `String|Array` - -**Default**: `undefined` - -```js -const isMatch = micromatch.matcher('*', { ignore: 'f*' }); -console.log(isMatch('foo')) //=> false -console.log(isMatch('bar')) //=> true -console.log(isMatch('baz')) //=> true -``` - -### options.matchBase - -Alias for [options.basename](#options-basename). - -### options.noextglob - -Disable extglob support, so that [extglobs](#extglobs) are regarded as literal characters. - -**Type**: `Boolean` - -**Default**: `undefined` - -**Examples** - -```js -console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)')); -//=> ['a/b', 'a/!(z)'] - -console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)', { noextglob: true })); -//=> ['a/!(z)'] (matches only as literal characters) -``` - -### options.nonegate - -Disallow negation (`!`) patterns, and treat leading `!` as a literal character to match. - -**Type**: `Boolean` - -**Default**: `undefined` - -### options.noglobstar - -Disable matching with globstars (`**`). - -**Type**: `Boolean` - -**Default**: `undefined` - -```js -micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**'); -//=> ['a/b', 'a/b/c', 'a/b/c/d'] - -micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**', {noglobstar: true}); -//=> ['a/b'] -``` - -### options.nonull - -Alias for [options.nullglob](#options-nullglob). 
- -### options.nullglob - -If `true`, when no matches are found the actual (arrayified) glob pattern is returned instead of an empty array. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `nonull`. - -**Type**: `Boolean` - -**Default**: `undefined` - -### options.onIgnore - -```js -const onIgnore = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); - // { glob: '*', regex: /^(?:(?!\.)(?=.)[^\/]*?\/?)$/, input: 'foo', output: 'foo' } -}; - -const isMatch = micromatch.matcher('*', { onIgnore, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -### options.onMatch - -```js -const onMatch = ({ glob, regex, input, output }) => { - console.log({ input, output }); - // { input: 'some\\path', output: 'some/path' } - // { input: 'some\\path', output: 'some/path' } - // { input: 'some\\path', output: 'some/path' } -}; - -const isMatch = micromatch.matcher('**', { onMatch, posixSlashes: true }); -isMatch('some\\path'); -isMatch('some\\path'); -isMatch('some\\path'); -``` - -### options.onResult - -```js -const onResult = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = micromatch('*', { onResult, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -### options.posixSlashes - -Convert path separators on returned files to posix/unix-style forward slashes. Aliased as `unixify` for backwards compatibility. - -**Type**: `Boolean` - -**Default**: `true` on windows, `false` everywhere else. - -**Example** - -```js -console.log(micromatch.match(['a\\b\\c'], 'a/**')); -//=> ['a/b/c'] - -console.log(micromatch.match(['a\\b\\c'], { posixSlashes: false })); -//=> ['a\\b\\c'] -``` - -### options.unescape - -Remove backslashes from escaped glob characters before creating the regular expression to perform matches. - -**Type**: `Boolean` - -**Default**: `undefined` - -**Example** - -In this example we want to match a literal `*`: - -```js -console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c')); -//=> ['a\\*c'] - -console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c', { unescape: true })); -//=> ['a*c'] -``` - -
-
- -## Extended globbing - -Micromatch supports the following extended globbing features. - -### Extglobs - -Extended globbing, as described by the bash man page: - -| **pattern** | **regex equivalent** | **description** | -| --- | --- | --- | -| `?(pattern)` | `(pattern)?` | Matches zero or one occurrence of the given patterns | -| `*(pattern)` | `(pattern)*` | Matches zero or more occurrences of the given patterns | -| `+(pattern)` | `(pattern)+` | Matches one or more occurrences of the given patterns | -| `@(pattern)` | `(pattern)` * | Matches one of the given patterns | -| `!(pattern)` | N/A (equivalent regex is much more complicated) | Matches anything except one of the given patterns | - -* Note that `@` isn't a regex character. - -### Braces - -Brace patterns can be used to match specific ranges or sets of characters. - -**Example** - -The pattern `{f,b}*/{1..3}/{b,q}*` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -baz/1/qux -baz/2/qux -baz/3/qux -``` - -Visit [braces](https://github.com/micromatch/braces) to see the full range of features and options related to brace expansion, or to create brace matching or expansion related issues. - -### Regex character classes - -Given the list: `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: - -* `[ac].js`: matches both `a` and `c`, returning `['a.js', 'c.js']` -* `[b-d].js`: matches from `b` to `d`, returning `['b.js', 'c.js', 'd.js']` -* `a/[A-Z].js`: matches and uppercase letter, returning `['a/E.md']` - -Learn about [regex character classes](http://www.regular-expressions.info/charclass.html). - -### Regex groups - -Given `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: - -* `(a|c).js`: would match either `a` or `c`, returning `['a.js', 'c.js']` -* `(b|d).js`: would match either `b` or `d`, returning `['b.js', 'd.js']` -* `(b|[A-Z]).js`: would match either `b` or an uppercase letter, returning `['b.js', 'E.js']` - -As with regex, parens can be nested, so patterns like `((a|b)|c)/b` will work. Although brace expansion might be friendlier to use, depending on preference. - -### POSIX bracket expressions - -POSIX brackets are intended to be more user-friendly than regex character classes. This of course is in the eye of the beholder. - -**Example** - -```js -console.log(micromatch.isMatch('a1', '[[:alpha:][:digit:]]')) //=> true -console.log(micromatch.isMatch('a1', '[[:alpha:][:alpha:]]')) //=> false -``` - -*** - -## Notes - -### Bash 4.3 parity - -Whenever possible matching behavior is based on behavior Bash 4.3, which is mostly consistent with minimatch. - -However, it's suprising how many edge cases and rabbit holes there are with glob matching, and since there is no real glob specification, and micromatch is more accurate than both Bash and minimatch, there are cases where best-guesses were made for behavior. In a few cases where Bash had no answers, we used wildmatch (used by git) as a fallback. - -### Backslashes - -There is an important, notable difference between minimatch and micromatch _in regards to how backslashes are handled_ in glob patterns. - -* Micromatch exclusively and explicitly reserves backslashes for escaping characters in a glob pattern, even on windows, which is consistent with bash behavior. _More importantly, unescaping globs can result in unsafe regular expressions_. -* Minimatch converts all backslashes to forward slashes, which means you can't use backslashes to escape any characters in your glob patterns. 
- -We made this decision for micromatch for a couple of reasons: - -* Consistency with bash conventions. -* Glob patterns are not filepaths. They are a type of [regular language](https://en.wikipedia.org/wiki/Regular_language) that is converted to a JavaScript regular expression. Thus, when forward slashes are defined in a glob pattern, the resulting regular expression will match windows or POSIX path separators just fine. - -**A note about joining paths to globs** - -Note that when you pass something like `path.join('foo', '*')` to micromatch, you are creating a filepath and expecting it to still work as a glob pattern. This causes problems on windows, since the `path.sep` is `\\`. - -In other words, since `\\` is reserved as an escape character in globs, on windows `path.join('foo', '*')` would result in `foo\\*`, which tells micromatch to match `*` as a literal character. This is the same behavior as bash. - -To solve this, you might be inspired to do something like `'foo\\*'.replace(/\\/g, '/')`, but this causes another, potentially much more serious, problem. - -## Benchmarks - -### Running benchmarks - -Install dependencies for running benchmarks: - -```sh -$ cd bench && npm install -``` - -Run the benchmarks: - -```sh -$ npm run bench -``` - -### Latest results - -As of August 23, 2024 (longer bars are better): - -```sh -# .makeRe star - micromatch x 2,232,802 ops/sec ±2.34% (89 runs sampled)) - minimatch x 781,018 ops/sec ±6.74% (92 runs sampled)) - -# .makeRe star; dot=true - micromatch x 1,863,453 ops/sec ±0.74% (93 runs sampled) - minimatch x 723,105 ops/sec ±0.75% (93 runs sampled) - -# .makeRe globstar - micromatch x 1,624,179 ops/sec ±2.22% (91 runs sampled) - minimatch x 1,117,230 ops/sec ±2.78% (86 runs sampled)) - -# .makeRe globstars - micromatch x 1,658,642 ops/sec ±0.86% (92 runs sampled) - minimatch x 741,224 ops/sec ±1.24% (89 runs sampled)) - -# .makeRe with leading star - micromatch x 1,525,014 ops/sec ±1.63% (90 runs sampled) - minimatch x 561,074 ops/sec ±3.07% (89 runs sampled) - -# .makeRe - braces - micromatch x 172,478 ops/sec ±2.37% (78 runs sampled) - minimatch x 96,087 ops/sec ±2.34% (88 runs sampled))) - -# .makeRe braces - range (expanded) - micromatch x 26,973 ops/sec ±0.84% (89 runs sampled) - minimatch x 3,023 ops/sec ±0.99% (90 runs sampled)) - -# .makeRe braces - range (compiled) - micromatch x 152,892 ops/sec ±1.67% (83 runs sampled) - minimatch x 992 ops/sec ±3.50% (89 runs sampled)d)) - -# .makeRe braces - nested ranges (expanded) - micromatch x 15,816 ops/sec ±13.05% (80 runs sampled) - minimatch x 2,953 ops/sec ±1.64% (91 runs sampled) - -# .makeRe braces - nested ranges (compiled) - micromatch x 110,881 ops/sec ±1.85% (82 runs sampled) - minimatch x 1,008 ops/sec ±1.51% (91 runs sampled) - -# .makeRe braces - set (compiled) - micromatch x 134,930 ops/sec ±3.54% (63 runs sampled)) - minimatch x 43,242 ops/sec ±0.60% (93 runs sampled) - -# .makeRe braces - nested sets (compiled) - micromatch x 94,455 ops/sec ±1.74% (69 runs sampled)) - minimatch x 27,720 ops/sec ±1.84% (93 runs sampled)) -``` - -## Contributing - -All contributions are welcome! Please read [the contributing guide](.github/contributing.md) to get started. - -**Bug reports** - -Please create an issue if you encounter a bug or matching behavior that doesn't seem correct. 
If you find a matching-related issue, please: - -* [research existing issues first](../../issues) (open and closed) -* visit the [GNU Bash documentation](https://www.gnu.org/software/bash/manual/) to see how Bash deals with the pattern -* visit the [minimatch](https://github.com/isaacs/minimatch) documentation to cross-check expected behavior in node.js -* if all else fails, since there is no real specification for globs we will probably need to discuss expected behavior and decide how to resolve it. which means any detail you can provide to help with this discussion would be greatly appreciated. - -**Platform issues** - -It's important to us that micromatch work consistently on all platforms. If you encounter any platform-specific matching or path related issues, please let us know (pull requests are also greatly appreciated). - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [braces](https://www.npmjs.com/package/braces): Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support… [more](https://github.com/micromatch/braces) | [homepage](https://github.com/micromatch/braces "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.") -* [expand-brackets](https://www.npmjs.com/package/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. | [homepage](https://github.com/micromatch/expand-brackets "Expand POSIX bracket expressions (character classes) in glob patterns.") -* [extglob](https://www.npmjs.com/package/extglob): Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob… [more](https://github.com/micromatch/extglob) | [homepage](https://github.com/micromatch/extglob "Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob patterns.") -* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") -* [nanomatch](https://www.npmjs.com/package/nanomatch): Fast, minimal glob matcher for node.js. Similar to micromatch, minimatch and multimatch, but complete Bash… [more](https://github.com/micromatch/nanomatch) | [homepage](https://github.com/micromatch/nanomatch "Fast, minimal glob matcher for node.js. 
Similar to micromatch, minimatch and multimatch, but complete Bash 4.3 wildcard support only (no support for exglobs, posix brackets or braces)") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 523 | [jonschlinkert](https://github.com/jonschlinkert) | -| 12 | [es128](https://github.com/es128) | -| 9 | [danez](https://github.com/danez) | -| 8 | [doowb](https://github.com/doowb) | -| 6 | [paulmillr](https://github.com/paulmillr) | -| 5 | [mrmlnc](https://github.com/mrmlnc) | -| 3 | [DrPizza](https://github.com/DrPizza) | -| 2 | [Tvrqvoise](https://github.com/Tvrqvoise) | -| 2 | [antonyk](https://github.com/antonyk) | -| 2 | [MartinKolarik](https://github.com/MartinKolarik) | -| 2 | [Glazy](https://github.com/Glazy) | -| 2 | [mceIdo](https://github.com/mceIdo) | -| 2 | [TrySound](https://github.com/TrySound) | -| 1 | [yvele](https://github.com/yvele) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | -| 1 | [simlu](https://github.com/simlu) | -| 1 | [curbengh](https://github.com/curbengh) | -| 1 | [fidian](https://github.com/fidian) | -| 1 | [tomByrer](https://github.com/tomByrer) | -| 1 | [ZoomerTedJackson](https://github.com/ZoomerTedJackson) | -| 1 | [styfle](https://github.com/styfle) | -| 1 | [sebdeckers](https://github.com/sebdeckers) | -| 1 | [muescha](https://github.com/muescha) | -| 1 | [juszczykjakub](https://github.com/juszczykjakub) | -| 1 | [joyceerhl](https://github.com/joyceerhl) | -| 1 | [donatj](https://github.com/donatj) | -| 1 | [frangio](https://github.com/frangio) | -| 1 | [UltCombo](https://github.com/UltCombo) | -| 1 | [DianeLooney](https://github.com/DianeLooney) | -| 1 | [devongovett](https://github.com/devongovett) | -| 1 | [Cslove](https://github.com/Cslove) | -| 1 | [amilajack](https://github.com/amilajack) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2024, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on August 23, 2024._ \ No newline at end of file diff --git a/node_modules/micromatch/index.js b/node_modules/micromatch/index.js deleted file mode 100644 index cb9d9ef..0000000 --- a/node_modules/micromatch/index.js +++ /dev/null @@ -1,474 +0,0 @@ -'use strict'; - -const util = require('util'); -const braces = require('braces'); -const picomatch = require('picomatch'); -const utils = require('picomatch/lib/utils'); - -const isEmptyString = v => v === '' || v === './'; -const hasBraces = v => { - const index = v.indexOf('{'); - return index > -1 && v.indexOf('}', index) > -1; -}; - -/** - * Returns an array of strings that match one or more glob patterns. - * - * ```js - * const mm = require('micromatch'); - * // mm(list, patterns[, options]); - * - * console.log(mm(['a.js', 'a.txt'], ['*.js'])); - * //=> [ 'a.js' ] - * ``` - * @param {String|Array} `list` List of strings to match. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
- * @param {Object} `options` See available [options](#options) - * @return {Array} Returns an array of matches - * @summary false - * @api public - */ - -const micromatch = (list, patterns, options) => { - patterns = [].concat(patterns); - list = [].concat(list); - - let omit = new Set(); - let keep = new Set(); - let items = new Set(); - let negatives = 0; - - let onResult = state => { - items.add(state.output); - if (options && options.onResult) { - options.onResult(state); - } - }; - - for (let i = 0; i < patterns.length; i++) { - let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); - let negated = isMatch.state.negated || isMatch.state.negatedExtglob; - if (negated) negatives++; - - for (let item of list) { - let matched = isMatch(item, true); - - let match = negated ? !matched.isMatch : matched.isMatch; - if (!match) continue; - - if (negated) { - omit.add(matched.output); - } else { - omit.delete(matched.output); - keep.add(matched.output); - } - } - } - - let result = negatives === patterns.length ? [...items] : [...keep]; - let matches = result.filter(item => !omit.has(item)); - - if (options && matches.length === 0) { - if (options.failglob === true) { - throw new Error(`No matches found for "${patterns.join(', ')}"`); - } - - if (options.nonull === true || options.nullglob === true) { - return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; - } - } - - return matches; -}; - -/** - * Backwards compatibility - */ - -micromatch.match = micromatch; - -/** - * Returns a matcher function from the given glob `pattern` and `options`. - * The returned function takes a string to match as its only argument and returns - * true if the string is a match. - * - * ```js - * const mm = require('micromatch'); - * // mm.matcher(pattern[, options]); - * - * const isMatch = mm.matcher('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @param {String} `pattern` Glob pattern - * @param {Object} `options` - * @return {Function} Returns a matcher function. - * @api public - */ - -micromatch.matcher = (pattern, options) => picomatch(pattern, options); - -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const mm = require('micromatch'); - * // mm.isMatch(string, patterns[, options]); - * - * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(mm.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `[options]` See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - -/** - * Backwards compatibility - */ - -micromatch.any = micromatch.isMatch; - -/** - * Returns a list of strings that _**do not match any**_ of the given `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.not(list, patterns[, options]); - * - * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); - * //=> ['b.b', 'c.c'] - * ``` - * @param {Array} `list` Array of strings to match. - * @param {String|Array} `patterns` One or more glob pattern to use for matching. 
- * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array} Returns an array of strings that **do not match** the given patterns. - * @api public - */ - -micromatch.not = (list, patterns, options = {}) => { - patterns = [].concat(patterns).map(String); - let result = new Set(); - let items = []; - - let onResult = state => { - if (options.onResult) options.onResult(state); - items.push(state.output); - }; - - let matches = new Set(micromatch(list, patterns, { ...options, onResult })); - - for (let item of items) { - if (!matches.has(item)) { - result.add(item); - } - } - return [...result]; -}; - -/** - * Returns true if the given `string` contains the given pattern. Similar - * to [.isMatch](#isMatch) but the pattern can match any part of the string. - * - * ```js - * var mm = require('micromatch'); - * // mm.contains(string, pattern[, options]); - * - * console.log(mm.contains('aa/bb/cc', '*b')); - * //=> true - * console.log(mm.contains('aa/bb/cc', '*d')); - * //=> false - * ``` - * @param {String} `str` The string to match. - * @param {String|Array} `patterns` Glob pattern to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any of the patterns matches any part of `str`. - * @api public - */ - -micromatch.contains = (str, pattern, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - if (Array.isArray(pattern)) { - return pattern.some(p => micromatch.contains(str, p, options)); - } - - if (typeof pattern === 'string') { - if (isEmptyString(str) || isEmptyString(pattern)) { - return false; - } - - if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { - return true; - } - } - - return micromatch.isMatch(str, pattern, { ...options, contains: true }); -}; - -/** - * Filter the keys of the given object with the given `glob` pattern - * and `options`. Does not attempt to match nested keys. If you need this feature, - * use [glob-object][] instead. - * - * ```js - * const mm = require('micromatch'); - * // mm.matchKeys(object, patterns[, options]); - * - * const obj = { aa: 'a', ab: 'b', ac: 'c' }; - * console.log(mm.matchKeys(obj, '*b')); - * //=> { ab: 'b' } - * ``` - * @param {Object} `object` The object with keys to filter. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Object} Returns an object with only keys that match the given patterns. - * @api public - */ - -micromatch.matchKeys = (obj, patterns, options) => { - if (!utils.isObject(obj)) { - throw new TypeError('Expected the first argument to be an object'); - } - let keys = micromatch(Object.keys(obj), patterns, options); - let res = {}; - for (let key of keys) res[key] = obj[key]; - return res; -}; - -/** - * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.some(list, patterns[, options]); - * - * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // true - * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. 
- * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` - * @api public - */ - -micromatch.some = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (items.some(item => isMatch(item))) { - return true; - } - } - return false; -}; - -/** - * Returns true if every string in the given `list` matches - * any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.every(list, patterns[, options]); - * - * console.log(mm.every('foo.js', ['foo.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // false - * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` - * @api public - */ - -micromatch.every = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (!items.every(item => isMatch(item))) { - return false; - } - } - return true; -}; - -/** - * Returns true if **all** of the given `patterns` match - * the specified string. - * - * ```js - * const mm = require('micromatch'); - * // mm.all(string, patterns[, options]); - * - * console.log(mm.all('foo.js', ['foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); - * // false - * - * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); - * // true - * ``` - * @param {String|Array} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.all = (str, patterns, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - return [].concat(patterns).every(p => picomatch(p, options)(str)); -}; - -/** - * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. - * - * ```js - * const mm = require('micromatch'); - * // mm.capture(pattern, string[, options]); - * - * console.log(mm.capture('test/*.js', 'test/foo.js')); - * //=> ['foo'] - * console.log(mm.capture('test/*.js', 'foo/bar.css')); - * //=> null - * ``` - * @param {String} `glob` Glob pattern to use for matching. - * @param {String} `input` String to match - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. 
- * @api public - */ - -micromatch.capture = (glob, input, options) => { - let posix = utils.isWindows(options); - let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); - let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); - - if (match) { - return match.slice(1).map(v => v === void 0 ? '' : v); - } -}; - -/** - * Create a regular expression from the given glob `pattern`. - * - * ```js - * const mm = require('micromatch'); - * // mm.makeRe(pattern[, options]); - * - * console.log(mm.makeRe('*.js')); - * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ - * ``` - * @param {String} `pattern` A glob pattern to convert to regex. - * @param {Object} `options` - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ - -micromatch.makeRe = (...args) => picomatch.makeRe(...args); - -/** - * Scan a glob pattern to separate the pattern into segments. Used - * by the [split](#split) method. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.scan(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -micromatch.scan = (...args) => picomatch.scan(...args); - -/** - * Parse a glob pattern to create the source string for a regular - * expression. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.parse(pattern[, options]); - * ``` - * @param {String} `glob` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as regex source string. - * @api public - */ - -micromatch.parse = (patterns, options) => { - let res = []; - for (let pattern of [].concat(patterns || [])) { - for (let str of braces(String(pattern), options)) { - res.push(picomatch.parse(str, options)); - } - } - return res; -}; - -/** - * Process the given brace `pattern`. - * - * ```js - * const { braces } = require('micromatch'); - * console.log(braces('foo/{a,b,c}/bar')); - * //=> [ 'foo/(a|b|c)/bar' ] - * - * console.log(braces('foo/{a,b,c}/bar', { expand: true })); - * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] - * ``` - * @param {String} `pattern` String with brace pattern to process. - * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. - * @return {Array} - * @api public - */ - -micromatch.braces = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - if ((options && options.nobrace === true) || !hasBraces(pattern)) { - return [pattern]; - } - return braces(pattern, options); -}; - -/** - * Expand braces - */ - -micromatch.braceExpand = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - return micromatch.braces(pattern, { ...options, expand: true }); -}; - -/** - * Expose micromatch - */ - -// exposed for tests -micromatch.hasBraces = hasBraces; -module.exports = micromatch; diff --git a/node_modules/micromatch/package.json b/node_modules/micromatch/package.json deleted file mode 100644 index d5558bb..0000000 --- a/node_modules/micromatch/package.json +++ /dev/null @@ -1,119 +0,0 @@ -{ - "name": "micromatch", - "description": "Glob matching for javascript/node.js. 
A replacement and faster alternative to minimatch and multimatch.", - "version": "4.0.8", - "homepage": "https://github.com/micromatch/micromatch", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "(https://github.com/DianeLooney)", - "Amila Welihinda (amilajack.com)", - "Bogdan Chadkin (https://github.com/TrySound)", - "Brian Woodward (https://twitter.com/doowb)", - "Devon Govett (http://badassjs.com)", - "Elan Shanker (https://github.com/es128)", - "Fabrício Matté (https://ultcombo.js.org)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Martin Kolárik (https://kolarik.sk)", - "Olsten Larck (https://i.am.charlike.online)", - "Paul Miller (paulmillr.com)", - "Tom Byrer (https://github.com/tomByrer)", - "Tyler Akins (http://rumkin.com)", - "Peter Bright (https://github.com/drpizza)", - "Kuba Juszczyk (https://github.com/ku8ar)" - ], - "repository": "micromatch/micromatch", - "bugs": { - "url": "https://github.com/micromatch/micromatch/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8.6" - }, - "scripts": { - "test": "mocha" - }, - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "devDependencies": { - "fill-range": "^7.0.1", - "gulp-format-md": "^2.0.0", - "minimatch": "^5.0.1", - "mocha": "^9.2.2", - "time-require": "github:jonschlinkert/time-require" - }, - "keywords": [ - "bash", - "bracket", - "character-class", - "expand", - "expansion", - "expression", - "extglob", - "extglobs", - "file", - "files", - "filter", - "find", - "glob", - "globbing", - "globs", - "globstar", - "lookahead", - "lookaround", - "lookbehind", - "match", - "matcher", - "matches", - "matching", - "micromatch", - "minimatch", - "multimatch", - "negate", - "negation", - "path", - "pattern", - "patterns", - "posix", - "regex", - "regexp", - "regular", - "shell", - "star", - "wildcard" - ], - "verb": { - "toc": "collapsible", - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "related": { - "list": [ - "braces", - "expand-brackets", - "extglob", - "fill-range", - "nanomatch" - ] - }, - "reflinks": [ - "extglob", - "fill-range", - "glob-object", - "minimatch", - "multimatch" - ] - } -} diff --git a/node_modules/nanoid/.devcontainer.json b/node_modules/nanoid/.devcontainer.json deleted file mode 100644 index 7fd5ba1..0000000 --- a/node_modules/nanoid/.devcontainer.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "image": "localhost/ai-opensource:latest", - "forwardPorts": [], - "mounts": [ - { - "source": "pnpm-store", - "target": "/home/ai/.local/share/pnpm/store", - "type": "volume" - }, - { - "source": "shell-history", - "target": "/home/ai/.local/share/history/", - "type": "volume" - } - ], - "workspaceMount": "", - "runArgs": [ - "--userns=keep-id:uid=1000,gid=1000", - "--volume=${localWorkspaceFolder}:/workspaces/${localWorkspaceFolderBasename}:Z", - "--network=host", - "--ulimit=host" - ] -} diff --git a/node_modules/nanoid/LICENSE b/node_modules/nanoid/LICENSE deleted file mode 100644 index 37f56aa..0000000 --- a/node_modules/nanoid/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright 2017 Andrey Sitnik - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/nanoid/README.md b/node_modules/nanoid/README.md deleted file mode 100644 index f4c8c13..0000000 --- a/node_modules/nanoid/README.md +++ /dev/null @@ -1,554 +0,0 @@ -# Nano ID - -Nano ID logo by Anton Lovchikov - -**English** | [Русский](./README.ru.md) | [简体中文](./README.zh-CN.md) | [Bahasa Indonesia](./README.id-ID.md) - -A tiny, secure, URL-friendly, unique string ID generator for JavaScript. - -> “An amazing level of senseless perfectionism, -> which is simply impossible not to respect.” - -* **Small.** 130 bytes (minified and gzipped). No dependencies. - [Size Limit] controls the size. -* **Fast.** It is 2 times faster than UUID. -* **Safe.** It uses hardware random generator. Can be used in clusters. -* **Short IDs.** It uses a larger alphabet than UUID (`A-Za-z0-9_-`). - So ID size was reduced from 36 to 21 symbols. -* **Portable.** Nano ID was ported - to [20 programming languages](#other-programming-languages). - -```js -import { nanoid } from 'nanoid' -model.id = nanoid() //=> "V1StGXR8_Z5jdHi6B-myT" -``` - -Supports modern browsers, IE [with Babel], Node.js and React Native. - -[online tool]: https://gitpod.io/#https://github.com/ai/nanoid/ -[with Babel]: https://developer.epages.com/blog/coding/how-to-transpile-node-modules-with-babel-and-webpack-in-a-monorepo/ -[Size Limit]: https://github.com/ai/size-limit - - - Sponsored by Evil Martians - - -## Table of Contents - -* [Comparison with UUID](#comparison-with-uuid) -* [Benchmark](#benchmark) -* [Security](#security) -* [API](#api) - * [Blocking](#blocking) - * [Async](#async) - * [Non-Secure](#non-secure) - * [Custom Alphabet or Size](#custom-alphabet-or-size) - * [Custom Random Bytes Generator](#custom-random-bytes-generator) -* [Usage](#usage) - * [IE](#ie) - * [React](#react) - * [React Native](#react-native) - * [Rollup](#rollup) - * [PouchDB and CouchDB](#pouchdb-and-couchdb) - * [Mongoose](#mongoose) - * [Web Workers](#web-workers) - * [CLI](#cli) - * [Other Programming Languages](#other-programming-languages) -* [Tools](#tools) - - -## Comparison with UUID - -Nano ID is quite comparable to UUID v4 (random-based). -It has a similar number of random bits in the ID -(126 in Nano ID and 122 in UUID), so it has a similar collision probability: - -> For there to be a one in a billion chance of duplication, -> 103 trillion version 4 IDs must be generated. - -There are three main differences between Nano ID and UUID v4: - -1. Nano ID uses a bigger alphabet, so a similar number of random bits - are packed in just 21 symbols instead of 36. -2. Nano ID code is **4 times less** than `uuid/v4` package: - 130 bytes instead of 483. -3. Because of memory allocation tricks, Nano ID is **2 times** faster than UUID. 
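To make the size difference above concrete, here is a minimal sketch (it assumes a recent Node.js where `crypto.randomUUID()` is built in; the printed IDs are illustrative):

```js
import { nanoid } from 'nanoid'
import { randomUUID } from 'node:crypto'

// Both IDs are random-based; Nano ID packs ~126 random bits into 21 symbols
// of `A-Za-z0-9_-`, while UUID v4 spreads ~122 random bits over 36 characters.
console.log(nanoid())     //=> "V1StGXR8_Z5jdHi6B-myT"                 (21 chars)
console.log(randomUUID()) //=> "36b8f84d-df4e-4d49-b662-bcde71a8764f"  (36 chars)
```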
- - -## Benchmark - -```rust -$ node ./test/benchmark.js -crypto.randomUUID 25,603,857 ops/sec -@napi-rs/uuid 9,973,819 ops/sec -uid/secure 8,234,798 ops/sec -@lukeed/uuid 7,464,706 ops/sec -nanoid 5,616,592 ops/sec -customAlphabet 3,115,207 ops/sec -uuid v4 1,535,753 ops/sec -secure-random-string 388,226 ops/sec -uid-safe.sync 363,489 ops/sec -cuid 187,343 ops/sec -shortid 45,758 ops/sec - -Async: -nanoid/async 96,094 ops/sec -async customAlphabet 97,184 ops/sec -async secure-random-string 92,794 ops/sec -uid-safe 90,684 ops/sec - -Non-secure: -uid 67,376,692 ops/sec -nanoid/non-secure 2,849,639 ops/sec -rndm 2,674,806 ops/sec -``` - -Test configuration: ThinkPad X1 Carbon Gen 9, Fedora 34, Node.js 16.10. - - -## Security - -*See a good article about random generators theory: -[Secure random values (in Node.js)]* - -* **Unpredictability.** Instead of using the unsafe `Math.random()`, Nano ID - uses the `crypto` module in Node.js and the Web Crypto API in browsers. - These modules use unpredictable hardware random generator. -* **Uniformity.** `random % alphabet` is a popular mistake to make when coding - an ID generator. The distribution will not be even; there will be a lower - chance for some symbols to appear compared to others. So, it will reduce - the number of tries when brute-forcing. Nano ID uses a [better algorithm] - and is tested for uniformity. - - Nano ID uniformity - -* **Well-documented:** all Nano ID hacks are documented. See comments - in [the source]. -* **Vulnerabilities:** to report a security vulnerability, please use - the [Tidelift security contact](https://tidelift.com/security). - Tidelift will coordinate the fix and disclosure. - -[Secure random values (in Node.js)]: https://gist.github.com/joepie91/7105003c3b26e65efcea63f3db82dfba -[better algorithm]: https://github.com/ai/nanoid/blob/main/index.js -[the source]: https://github.com/ai/nanoid/blob/main/index.js - - -## Install - -```bash -npm install --save nanoid -``` - -For quick hacks, you can load Nano ID from CDN. Though, it is not recommended -to be used in production because of the lower loading performance. - -```js -import { nanoid } from 'https://cdn.jsdelivr.net/npm/nanoid/nanoid.js' -``` - -Nano ID provides ES modules. You do not need to do anything to use Nano ID -as ESM in webpack, Rollup, Parcel, or Node.js. - -```js -import { nanoid } from 'nanoid' -``` - -In Node.js you can use CommonJS import: - -```js -const { nanoid } = require('nanoid') -``` - - -## API - -Nano ID has 3 APIs: normal (blocking), asynchronous, and non-secure. - -By default, Nano ID uses URL-friendly symbols (`A-Za-z0-9_-`) and returns an ID -with 21 characters (to have a collision probability similar to UUID v4). - - -### Blocking - -The safe and easiest way to use Nano ID. - -In rare cases could block CPU from other work while noise collection -for hardware random generator. - -```js -import { nanoid } from 'nanoid' -model.id = nanoid() //=> "V1StGXR8_Z5jdHi6B-myT" -``` - -If you want to reduce the ID size (and increase collisions probability), -you can pass the size as an argument. - -```js -nanoid(10) //=> "IRFa-VaY2b" -``` - -Don’t forget to check the safety of your ID size -in our [ID collision probability] calculator. - -You can also use a [custom alphabet](#custom-alphabet-or-size) -or a [random generator](#custom-random-bytes-generator). - -[ID collision probability]: https://zelark.github.io/nano-id-cc/ - - -### Async - -To generate hardware random bytes, CPU collects electromagnetic noise. 
-For most cases, entropy will be already collected. - -In the synchronous API during the noise collection, the CPU is busy and -cannot do anything useful (for instance, process another HTTP request). - -Using the asynchronous API of Nano ID, another code can run during -the entropy collection. - -```js -import { nanoid } from 'nanoid/async' - -async function createUser () { - user.id = await nanoid() -} -``` - -Read more about entropy collection in [`crypto.randomBytes`] docs. - -Unfortunately, you will lose Web Crypto API advantages in a browser -if you use the asynchronous API. So, currently, in the browser, you are limited -with either security (`nanoid`), asynchronous behavior (`nanoid/async`), -or non-secure behavior (`nanoid/non-secure`) that will be explained -in the next part of the documentation. - -[`crypto.randomBytes`]: https://nodejs.org/api/crypto.html#crypto_crypto_randombytes_size_callback - - -### Non-Secure - -By default, Nano ID uses hardware random bytes generation for security -and low collision probability. If you are not so concerned with security, -you can use the faster non-secure generator. - -```js -import { nanoid } from 'nanoid/non-secure' -const id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqLJ" -``` - - -### Custom Alphabet or Size - -`customAlphabet` allows you to create `nanoid` with your own alphabet -and ID size. - -```js -import { customAlphabet } from 'nanoid' -const nanoid = customAlphabet('1234567890abcdef', 10) -model.id = nanoid() //=> "4f90d13a42" -``` - -```js -import { customAlphabet } from 'nanoid/async' -const nanoid = customAlphabet('1234567890abcdef', 10) -async function createUser () { - user.id = await nanoid() -} -``` - -```js -import { customAlphabet } from 'nanoid/non-secure' -const nanoid = customAlphabet('1234567890abcdef', 10) -user.id = nanoid() -``` - -Check the safety of your custom alphabet and ID size in our -[ID collision probability] calculator. For more alphabets, check out the options -in [`nanoid-dictionary`]. - -Alphabet must contain 256 symbols or less. -Otherwise, the security of the internal generator algorithm is not guaranteed. - -In addition to setting a default size, you can change the ID size when calling -the function: - -```js -import { customAlphabet } from 'nanoid' -const nanoid = customAlphabet('1234567890abcdef', 10) -model.id = nanoid(5) //=> "f01a2" -``` - -[ID collision probability]: https://alex7kom.github.io/nano-nanoid-cc/ -[`nanoid-dictionary`]: https://github.com/CyberAP/nanoid-dictionary - - -### Custom Random Bytes Generator - -`customRandom` allows you to create a `nanoid` and replace alphabet -and the default random bytes generator. - -In this example, a seed-based generator is used: - -```js -import { customRandom } from 'nanoid' - -const rng = seedrandom(seed) -const nanoid = customRandom('abcdef', 10, size => { - return (new Uint8Array(size)).map(() => 256 * rng()) -}) - -nanoid() //=> "fbaefaadeb" -``` - -`random` callback must accept the array size and return an array -with random numbers. - -If you want to use the same URL-friendly symbols with `customRandom`, -you can get the default alphabet using the `urlAlphabet`. - -```js -const { customRandom, urlAlphabet } = require('nanoid') -const nanoid = customRandom(urlAlphabet, 10, random) -``` - -Asynchronous and non-secure APIs are not available for `customRandom`. - -Note, that between Nano ID versions we may change random generator -call sequence. If you are using seed-based generators, we do not guarantee -the same result. 
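The snippet above leaves `seedrandom` undefined; as a self-contained variation, here is a minimal sketch using a tiny mulberry32 PRNG (the generator and seed are illustrative, and the resulting IDs are reproducible, not secure):

```js
import { customRandom } from 'nanoid'

// mulberry32: a small seeded PRNG, handy for reproducible test IDs,
// but NOT cryptographically secure.
function mulberry32(seed) {
  return function () {
    seed = (seed + 0x6d2b79f5) | 0
    let t = Math.imul(seed ^ (seed >>> 15), 1 | seed)
    t = (t + Math.imul(t ^ (t >>> 7), 61 | t)) ^ t
    return ((t ^ (t >>> 14)) >>> 0) / 4294967296
  }
}

const rng = mulberry32(42)
const nanoid = customRandom('abcdef', 10, size =>
  new Uint8Array(size).map(() => 256 * rng())
)

nanoid() //=> a deterministic 10-character ID over "abcdef"
```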
- - -## Usage - -### IE - -If you support IE, you need to [transpile `node_modules`] by Babel -and add `crypto` alias. Moreover, `UInt8Array` in IE actually -is not an array and to cope with it, you have to convert it to an array -manually: - -```js -// polyfills.js -if (!window.crypto && window.msCrypto) { - window.crypto = window.msCrypto - - const getRandomValuesDef = window.crypto.getRandomValues - - window.crypto.getRandomValues = function (array) { - const values = getRandomValuesDef.call(window.crypto, array) - const result = [] - - for (let i = 0; i < array.length; i++) { - result[i] = values[i]; - } - - return result - }; -} -``` - -```js -import './polyfills.js' -import { nanoid } from 'nanoid' -``` - -[transpile `node_modules`]: https://developer.epages.com/blog/coding/how-to-transpile-node-modules-with-babel-and-webpack-in-a-monorepo/ - - -### React - -There’s no correct way to use Nano ID for React `key` prop -since it should be consistent among renders. - -```jsx -function Todos({todos}) { - return ( -
-    <ul>
-      {todos.map(todo => (
-        <li key={nanoid()}> /* DON’T DO IT */
-          {todo.text}
-        </li>
-      ))}
-    </ul>
-  )
-}
-```
-
-You should rather try to reach for stable ID inside your list item.
-
-```jsx
-const todoItems = todos.map((todo) =>
-  <li key={todo.id}>
-    {todo.text}
-  </li>
-)
-```
-
-In case you don’t have stable IDs you'd rather use index as `key`
-instead of `nanoid()`:
-
-```jsx
-const todoItems = todos.map((text, index) =>
-  <li key={index}> /* Still not recommended but preferred over nanoid().
-                      Only do this if items have no stable IDs. */
-    {text}
  • -) -``` - - -### React Native - -React Native does not have built-in random generator. The following polyfill -works for plain React Native and Expo starting with `39.x`. - -1. Check [`react-native-get-random-values`] docs and install it. -2. Import it before Nano ID. - -```js -import 'react-native-get-random-values' -import { nanoid } from 'nanoid' -``` - -[`react-native-get-random-values`]: https://github.com/LinusU/react-native-get-random-values - - -### Rollup - -For Rollup you will need [`@rollup/plugin-node-resolve`] to bundle browser version -of this library.: - -```js - plugins: [ - nodeResolve({ - browser: true - }) - ] -``` - -[`@rollup/plugin-node-resolve`]: https://github.com/rollup/plugins/tree/master/packages/node-resolve - - -### PouchDB and CouchDB - -In PouchDB and CouchDB, IDs can’t start with an underscore `_`. -A prefix is required to prevent this issue, as Nano ID might use a `_` -at the start of the ID by default. - -Override the default ID with the following option: - -```js -db.put({ - _id: 'id' + nanoid(), - … -}) -``` - - -### Mongoose - -```js -const mySchema = new Schema({ - _id: { - type: String, - default: () => nanoid() - } -}) -``` - - -### Web Workers - -Web Workers do not have access to a secure random generator. - -Security is important in IDs when IDs should be unpredictable. -For instance, in "access by URL" link generation. -If you do not need unpredictable IDs, but you need to use Web Workers, -you can use the non‑secure ID generator. - -```js -import { nanoid } from 'nanoid/non-secure' -nanoid() //=> "Uakgb_J5m9g-0JDMbcJqLJ" -``` - -Note: non-secure IDs are more prone to collision attacks. - - -### CLI - -You can get unique ID in terminal by calling `npx nanoid`. You need only -Node.js in the system. You do not need Nano ID to be installed anywhere. - -```sh -$ npx nanoid -npx: installed 1 in 0.63s -LZfXLFzPPR4NNrgjlWDxn -``` - -Size of generated ID can be specified with `--size` (or `-s`) option: - -```sh -$ npx nanoid --size 10 -L3til0JS4z -``` - -Custom alphabet can be specified with `--alphabet` (or `-a`) option -(note that in this case `--size` is required): - -```sh -$ npx nanoid --alphabet abc --size 15 -bccbcabaabaccab -``` - -### Other Programming Languages - -Nano ID was ported to many languages. You can use these ports to have -the same ID generator on the client and server side. 
- -* [C#](https://github.com/codeyu/nanoid-net) -* [C++](https://github.com/mcmikecreations/nanoid_cpp) -* [Clojure and ClojureScript](https://github.com/zelark/nano-id) -* [ColdFusion/CFML](https://github.com/JamoCA/cfml-nanoid) -* [Crystal](https://github.com/mamantoha/nanoid.cr) -* [Dart & Flutter](https://github.com/pd4d10/nanoid-dart) -* [Deno](https://github.com/ianfabs/nanoid) -* [Go](https://github.com/matoous/go-nanoid) -* [Elixir](https://github.com/railsmechanic/nanoid) -* [Haskell](https://github.com/MichelBoucey/NanoID) -* [Janet](https://sr.ht/~statianzo/janet-nanoid/) -* [Java](https://github.com/aventrix/jnanoid) -* [Nim](https://github.com/icyphox/nanoid.nim) -* [OCaml](https://github.com/routineco/ocaml-nanoid) -* [Perl](https://github.com/tkzwtks/Nanoid-perl) -* [PHP](https://github.com/hidehalo/nanoid-php) -* [Python](https://github.com/puyuan/py-nanoid) - with [dictionaries](https://pypi.org/project/nanoid-dictionary) -* [Postgres Extension](https://github.com/spa5k/uids-postgres) -* [R](https://github.com/hrbrmstr/nanoid) (with dictionaries) -* [Ruby](https://github.com/radeno/nanoid.rb) -* [Rust](https://github.com/nikolay-govorov/nanoid) -* [Swift](https://github.com/antiflasher/NanoID) -* [Unison](https://share.unison-lang.org/latest/namespaces/hojberg/nanoid) -* [V](https://github.com/invipal/nanoid) -* [Zig](https://github.com/SasLuca/zig-nanoid) - -For other environments, [CLI] is available to generate IDs from a command line. - -[CLI]: #cli - - -## Tools - -* [ID size calculator] shows collision probability when adjusting - the ID alphabet or size. -* [`nanoid-dictionary`] with popular alphabets to use with [`customAlphabet`]. -* [`nanoid-good`] to be sure that your ID doesn’t contain any obscene words. - -[`nanoid-dictionary`]: https://github.com/CyberAP/nanoid-dictionary -[ID size calculator]: https://zelark.github.io/nano-id-cc/ -[`customAlphabet`]: #custom-alphabet-or-size -[`nanoid-good`]: https://github.com/y-gagar1n/nanoid-good diff --git a/node_modules/nanoid/async/index.browser.cjs b/node_modules/nanoid/async/index.browser.cjs deleted file mode 100644 index 80d1871..0000000 --- a/node_modules/nanoid/async/index.browser.cjs +++ /dev/null @@ -1,69 +0,0 @@ -let random = async bytes => crypto.getRandomValues(new Uint8Array(bytes)) - -let customAlphabet = (alphabet, defaultSize = 21) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - // `Math.clz32` is not used, because it is not available in browsers. - let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). 
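  // Worked example (illustrative numbers, not taken from the library): for a
  // 16-symbol alphabet such as hex and the default size 21, mask is 15
  // (00001111) and step becomes -~(1.6 * 15 * 21 / 16) = 32 bytes per batch.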
- - // `-~f => Math.ceil(f)` if f is a float - // `-~i => i + 1` if i is an integer - let step = -~((1.6 * mask * defaultSize) / alphabet.length) - - return async (size = defaultSize) => { - let id = '' - while (true) { - let bytes = crypto.getRandomValues(new Uint8Array(step)) - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = step | 0 - while (i--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. - id += alphabet[bytes[i] & mask] || '' - if (id.length === size) return id - } - } - } -} - -let nanoid = async (size = 21) => { - let id = '' - let bytes = crypto.getRandomValues(new Uint8Array((size |= 0))) - - // A compact alternative for `for (var i = 0; i < step; i++)`. - while (size--) { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - let byte = bytes[size] & 63 - if (byte < 36) { - // `0-9a-z` - id += byte.toString(36) - } else if (byte < 62) { - // `A-Z` - id += (byte - 26).toString(36).toUpperCase() - } else if (byte < 63) { - id += '_' - } else { - id += '-' - } - } - return id -} - -module.exports = { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.browser.js b/node_modules/nanoid/async/index.browser.js deleted file mode 100644 index aeffb3f..0000000 --- a/node_modules/nanoid/async/index.browser.js +++ /dev/null @@ -1,69 +0,0 @@ -let random = async bytes => crypto.getRandomValues(new Uint8Array(bytes)) - -let customAlphabet = (alphabet, defaultSize = 21) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - // `Math.clz32` is not used, because it is not available in browsers. - let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - - // `-~f => Math.ceil(f)` if f is a float - // `-~i => i + 1` if i is an integer - let step = -~((1.6 * mask * defaultSize) / alphabet.length) - - return async (size = defaultSize) => { - let id = '' - while (true) { - let bytes = crypto.getRandomValues(new Uint8Array(step)) - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = step | 0 - while (i--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. 
- id += alphabet[bytes[i] & mask] || '' - if (id.length === size) return id - } - } - } -} - -let nanoid = async (size = 21) => { - let id = '' - let bytes = crypto.getRandomValues(new Uint8Array((size |= 0))) - - // A compact alternative for `for (var i = 0; i < step; i++)`. - while (size--) { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - let byte = bytes[size] & 63 - if (byte < 36) { - // `0-9a-z` - id += byte.toString(36) - } else if (byte < 62) { - // `A-Z` - id += (byte - 26).toString(36).toUpperCase() - } else if (byte < 63) { - id += '_' - } else { - id += '-' - } - } - return id -} - -export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.cjs b/node_modules/nanoid/async/index.cjs deleted file mode 100644 index f1b0ad0..0000000 --- a/node_modules/nanoid/async/index.cjs +++ /dev/null @@ -1,71 +0,0 @@ -let crypto = require('crypto') - -let { urlAlphabet } = require('../url-alphabet/index.cjs') - -// `crypto.randomFill()` is a little faster than `crypto.randomBytes()`, -// because it is possible to use in combination with `Buffer.allocUnsafe()`. -let random = bytes => - new Promise((resolve, reject) => { - // `Buffer.allocUnsafe()` is faster because it doesn’t flush the memory. - // Memory flushing is unnecessary since the buffer allocation itself resets - // the memory with the new bytes. - crypto.randomFill(Buffer.allocUnsafe(bytes), (err, buf) => { - if (err) { - reject(err) - } else { - resolve(buf) - } - }) - }) - -let customAlphabet = (alphabet, defaultSize = 21) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) - - let tick = (id, size = defaultSize) => - random(step).then(bytes => { - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = step - while (i--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. - id += alphabet[bytes[i] & mask] || '' - if (id.length >= size) return id - } - return tick(id, size) - }) - - return size => tick('', size) -} - -let nanoid = (size = 21) => - random((size |= 0)).then(bytes => { - let id = '' - // A compact alternative for `for (var i = 0; i < step; i++)`. - while (size--) { - // It is incorrect to use bytes exceeding the alphabet size. 
- // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - id += urlAlphabet[bytes[size] & 63] - } - return id - }) - -module.exports = { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.d.ts b/node_modules/nanoid/async/index.d.ts deleted file mode 100644 index 9e91965..0000000 --- a/node_modules/nanoid/async/index.d.ts +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Generate secure URL-friendly unique ID. The non-blocking version. - * - * By default, the ID will have 21 symbols to have a collision probability - * similar to UUID v4. - * - * ```js - * import { nanoid } from 'nanoid/async' - * nanoid().then(id => { - * model.id = id - * }) - * ``` - * - * @param size Size of the ID. The default size is 21. - * @returns A promise with a random string. - */ -export function nanoid(size?: number): Promise - -/** - * A low-level function. - * Generate secure unique ID with custom alphabet. The non-blocking version. - * - * Alphabet must contain 256 symbols or less. Otherwise, the generator - * will not be secure. - * - * @param alphabet Alphabet used to generate the ID. - * @param defaultSize Size of the ID. The default size is 21. - * @returns A function that returns a promise with a random string. - * - * ```js - * import { customAlphabet } from 'nanoid/async' - * const nanoid = customAlphabet('0123456789абвгдеё', 5) - * nanoid().then(id => { - * model.id = id //=> "8ё56а" - * }) - * ``` - */ -export function customAlphabet( - alphabet: string, - defaultSize?: number -): (size?: number) => Promise - -/** - * Generate an array of random bytes collected from hardware noise. - * - * ```js - * import { random } from 'nanoid/async' - * random(5).then(bytes => { - * bytes //=> [10, 67, 212, 67, 89] - * }) - * ``` - * - * @param bytes Size of the array. - * @returns A promise with a random bytes array. - */ -export function random(bytes: number): Promise diff --git a/node_modules/nanoid/async/index.js b/node_modules/nanoid/async/index.js deleted file mode 100644 index 7f2eae9..0000000 --- a/node_modules/nanoid/async/index.js +++ /dev/null @@ -1,71 +0,0 @@ -import crypto from 'crypto' - -import { urlAlphabet } from '../url-alphabet/index.js' - -// `crypto.randomFill()` is a little faster than `crypto.randomBytes()`, -// because it is possible to use in combination with `Buffer.allocUnsafe()`. -let random = bytes => - new Promise((resolve, reject) => { - // `Buffer.allocUnsafe()` is faster because it doesn’t flush the memory. - // Memory flushing is unnecessary since the buffer allocation itself resets - // the memory with the new bytes. - crypto.randomFill(Buffer.allocUnsafe(bytes), (err, buf) => { - if (err) { - reject(err) - } else { - resolve(buf) - } - }) - }) - -let customAlphabet = (alphabet, defaultSize = 21) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. 
- - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) - - let tick = (id, size = defaultSize) => - random(step).then(bytes => { - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = step - while (i--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. - id += alphabet[bytes[i] & mask] || '' - if (id.length >= size) return id - } - return tick(id, size) - }) - - return size => tick('', size) -} - -let nanoid = (size = 21) => - random((size |= 0)).then(bytes => { - let id = '' - // A compact alternative for `for (var i = 0; i < step; i++)`. - while (size--) { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - id += urlAlphabet[bytes[size] & 63] - } - return id - }) - -export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.native.js b/node_modules/nanoid/async/index.native.js deleted file mode 100644 index a765de9..0000000 --- a/node_modules/nanoid/async/index.native.js +++ /dev/null @@ -1,57 +0,0 @@ -import { getRandomBytesAsync } from 'expo-random' - -import { urlAlphabet } from '../url-alphabet/index.js' - -let random = getRandomBytesAsync - -let customAlphabet = (alphabet, defaultSize = 21) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) - - let tick = (id, size = defaultSize) => - random(step).then(bytes => { - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = step - while (i--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. 
- id += alphabet[bytes[i] & mask] || '' - if (id.length >= size) return id - } - return tick(id, size) - }) - - return size => tick('', size) -} - -let nanoid = (size = 21) => - random((size |= 0)).then(bytes => { - let id = '' - // A compact alternative for `for (var i = 0; i < step; i++)`. - while (size--) { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - id += urlAlphabet[bytes[size] & 63] - } - return id - }) - -export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/package.json b/node_modules/nanoid/async/package.json deleted file mode 100644 index 578cdb4..0000000 --- a/node_modules/nanoid/async/package.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "module", - "main": "index.cjs", - "module": "index.js", - "react-native": { - "./index.js": "./index.native.js" - }, - "browser": { - "./index.js": "./index.browser.js", - "./index.cjs": "./index.browser.cjs" - } -} \ No newline at end of file diff --git a/node_modules/nanoid/bin/nanoid.cjs b/node_modules/nanoid/bin/nanoid.cjs deleted file mode 100755 index c76db0f..0000000 --- a/node_modules/nanoid/bin/nanoid.cjs +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env node - -let { nanoid, customAlphabet } = require('..') - -function print(msg) { - process.stdout.write(msg + '\n') -} - -function error(msg) { - process.stderr.write(msg + '\n') - process.exit(1) -} - -if (process.argv.includes('--help') || process.argv.includes('-h')) { - print(` - Usage - $ nanoid [options] - - Options - -s, --size Generated ID size - -a, --alphabet Alphabet to use - -h, --help Show this help - - Examples - $ nanoid --s 15 - S9sBF77U6sDB8Yg - - $ nanoid --size 10 --alphabet abc - bcabababca`) - process.exit() -} - -let alphabet, size -for (let i = 2; i < process.argv.length; i++) { - let arg = process.argv[i] - if (arg === '--size' || arg === '-s') { - size = Number(process.argv[i + 1]) - i += 1 - if (Number.isNaN(size) || size <= 0) { - error('Size must be positive integer') - } - } else if (arg === '--alphabet' || arg === '-a') { - alphabet = process.argv[i + 1] - i += 1 - } else { - error('Unknown argument ' + arg) - } -} - -if (alphabet) { - let customNanoid = customAlphabet(alphabet, size) - print(customNanoid()) -} else { - print(nanoid(size)) -} diff --git a/node_modules/nanoid/index.browser.cjs b/node_modules/nanoid/index.browser.cjs deleted file mode 100644 index d21a91f..0000000 --- a/node_modules/nanoid/index.browser.cjs +++ /dev/null @@ -1,72 +0,0 @@ -// This file replaces `index.js` in bundlers like webpack or Rollup, -// according to `browser` config in `package.json`. - -let { urlAlphabet } = require('./url-alphabet/index.cjs') - -let random = bytes => crypto.getRandomValues(new Uint8Array(bytes)) - -let customRandom = (alphabet, defaultSize, getRandom) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - // `Math.clz32` is not used, because it is not available in browsers. 
- let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - - // `-~f => Math.ceil(f)` if f is a float - // `-~i => i + 1` if i is an integer - let step = -~((1.6 * mask * defaultSize) / alphabet.length) - - return (size = defaultSize) => { - let id = '' - while (true) { - let bytes = getRandom(step) - // A compact alternative for `for (var i = 0; i < step; i++)`. - let j = step | 0 - while (j--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. - id += alphabet[bytes[j] & mask] || '' - if (id.length === size) return id - } - } - } -} - -let customAlphabet = (alphabet, size = 21) => - customRandom(alphabet, size, random) - -let nanoid = (size = 21) => - crypto.getRandomValues(new Uint8Array(size)).reduce((id, byte) => { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - byte &= 63 - if (byte < 36) { - // `0-9a-z` - id += byte.toString(36) - } else if (byte < 62) { - // `A-Z` - id += (byte - 26).toString(36).toUpperCase() - } else if (byte > 62) { - id += '-' - } else { - id += '_' - } - return id - }, '') - -module.exports = { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.browser.js b/node_modules/nanoid/index.browser.js deleted file mode 100644 index 732e504..0000000 --- a/node_modules/nanoid/index.browser.js +++ /dev/null @@ -1,72 +0,0 @@ -// This file replaces `index.js` in bundlers like webpack or Rollup, -// according to `browser` config in `package.json`. - -import { urlAlphabet } from './url-alphabet/index.js' - -let random = bytes => crypto.getRandomValues(new Uint8Array(bytes)) - -let customRandom = (alphabet, defaultSize, getRandom) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - // `Math.clz32` is not used, because it is not available in browsers. - let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. 
- // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - - // `-~f => Math.ceil(f)` if f is a float - // `-~i => i + 1` if i is an integer - let step = -~((1.6 * mask * defaultSize) / alphabet.length) - - return (size = defaultSize) => { - let id = '' - while (true) { - let bytes = getRandom(step) - // A compact alternative for `for (var i = 0; i < step; i++)`. - let j = step | 0 - while (j--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. - id += alphabet[bytes[j] & mask] || '' - if (id.length === size) return id - } - } - } -} - -let customAlphabet = (alphabet, size = 21) => - customRandom(alphabet, size, random) - -let nanoid = (size = 21) => - crypto.getRandomValues(new Uint8Array(size)).reduce((id, byte) => { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - byte &= 63 - if (byte < 36) { - // `0-9a-z` - id += byte.toString(36) - } else if (byte < 62) { - // `A-Z` - id += (byte - 26).toString(36).toUpperCase() - } else if (byte > 62) { - id += '-' - } else { - id += '_' - } - return id - }, '') - -export { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.cjs b/node_modules/nanoid/index.cjs deleted file mode 100644 index c20e374..0000000 --- a/node_modules/nanoid/index.cjs +++ /dev/null @@ -1,85 +0,0 @@ -let crypto = require('crypto') - -let { urlAlphabet } = require('./url-alphabet/index.cjs') - -// It is best to make fewer, larger requests to the crypto module to -// avoid system call overhead. So, random numbers are generated in a -// pool. The pool is a Buffer that is larger than the initial random -// request size by this multiplier. The pool is enlarged if subsequent -// requests exceed the maximum buffer size. -const POOL_SIZE_MULTIPLIER = 128 -let pool, poolOffset - -let fillPool = bytes => { - if (!pool || pool.length < bytes) { - pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER) - crypto.randomFillSync(pool) - poolOffset = 0 - } else if (poolOffset + bytes > pool.length) { - crypto.randomFillSync(pool) - poolOffset = 0 - } - poolOffset += bytes -} - -let random = bytes => { - // `|=` convert `bytes` to number to prevent `valueOf` abusing and pool pollution - fillPool((bytes |= 0)) - return pool.subarray(poolOffset - bytes, poolOffset) -} - -let customRandom = (alphabet, defaultSize, getRandom) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. 
- - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) - - return (size = defaultSize) => { - let id = '' - while (true) { - let bytes = getRandom(step) - // A compact alternative for `for (let i = 0; i < step; i++)`. - let i = step - while (i--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. - id += alphabet[bytes[i] & mask] || '' - if (id.length === size) return id - } - } - } -} - -let customAlphabet = (alphabet, size = 21) => - customRandom(alphabet, size, random) - -let nanoid = (size = 21) => { - // `|=` convert `size` to number to prevent `valueOf` abusing and pool pollution - fillPool((size |= 0)) - let id = '' - // We are reading directly from the random pool to avoid creating new array - for (let i = poolOffset - size; i < poolOffset; i++) { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. - id += urlAlphabet[pool[i] & 63] - } - return id -} - -module.exports = { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.d.cts b/node_modules/nanoid/index.d.cts deleted file mode 100644 index 3e111a3..0000000 --- a/node_modules/nanoid/index.d.cts +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Generate secure URL-friendly unique ID. - * - * By default, the ID will have 21 symbols to have a collision probability - * similar to UUID v4. - * - * ```js - * import { nanoid } from 'nanoid' - * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" - * ``` - * - * @param size Size of the ID. The default size is 21. - * @returns A random string. - */ -export function nanoid(size?: number): string - -/** - * Generate secure unique ID with custom alphabet. - * - * Alphabet must contain 256 symbols or less. Otherwise, the generator - * will not be secure. - * - * @param alphabet Alphabet used to generate the ID. - * @param defaultSize Size of the ID. The default size is 21. - * @returns A random string generator. - * - * ```js - * const { customAlphabet } = require('nanoid') - * const nanoid = customAlphabet('0123456789абвгдеё', 5) - * nanoid() //=> "8ё56а" - * ``` - */ -export function customAlphabet( - alphabet: string, - defaultSize?: number -): (size?: number) => string - -/** - * Generate unique ID with custom random generator and alphabet. - * - * Alphabet must contain 256 symbols or less. Otherwise, the generator - * will not be secure. - * - * ```js - * import { customRandom } from 'nanoid/format' - * - * const nanoid = customRandom('abcdef', 5, size => { - * const random = [] - * for (let i = 0; i < size; i++) { - * random.push(randomByte()) - * } - * return random - * }) - * - * nanoid() //=> "fbaef" - * ``` - * - * @param alphabet Alphabet used to generate a random string. - * @param size Size of the random string. - * @param random A random bytes generator. - * @returns A random string generator. - */ -export function customRandom( - alphabet: string, - size: number, - random: (bytes: number) => Uint8Array -): () => string - -/** - * URL safe symbols. 
- * - * ```js - * import { urlAlphabet } from 'nanoid' - * const nanoid = customAlphabet(urlAlphabet, 10) - * nanoid() //=> "Uakgb_J5m9" - * ``` - */ -export const urlAlphabet: string - -/** - * Generate an array of random bytes collected from hardware noise. - * - * ```js - * import { customRandom, random } from 'nanoid' - * const nanoid = customRandom("abcdef", 5, random) - * ``` - * - * @param bytes Size of the array. - * @returns An array of random bytes. - */ -export function random(bytes: number): Uint8Array diff --git a/node_modules/nanoid/index.d.ts b/node_modules/nanoid/index.d.ts deleted file mode 100644 index 3e111a3..0000000 --- a/node_modules/nanoid/index.d.ts +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Generate secure URL-friendly unique ID. - * - * By default, the ID will have 21 symbols to have a collision probability - * similar to UUID v4. - * - * ```js - * import { nanoid } from 'nanoid' - * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" - * ``` - * - * @param size Size of the ID. The default size is 21. - * @returns A random string. - */ -export function nanoid(size?: number): string - -/** - * Generate secure unique ID with custom alphabet. - * - * Alphabet must contain 256 symbols or less. Otherwise, the generator - * will not be secure. - * - * @param alphabet Alphabet used to generate the ID. - * @param defaultSize Size of the ID. The default size is 21. - * @returns A random string generator. - * - * ```js - * const { customAlphabet } = require('nanoid') - * const nanoid = customAlphabet('0123456789абвгдеё', 5) - * nanoid() //=> "8ё56а" - * ``` - */ -export function customAlphabet( - alphabet: string, - defaultSize?: number -): (size?: number) => string - -/** - * Generate unique ID with custom random generator and alphabet. - * - * Alphabet must contain 256 symbols or less. Otherwise, the generator - * will not be secure. - * - * ```js - * import { customRandom } from 'nanoid/format' - * - * const nanoid = customRandom('abcdef', 5, size => { - * const random = [] - * for (let i = 0; i < size; i++) { - * random.push(randomByte()) - * } - * return random - * }) - * - * nanoid() //=> "fbaef" - * ``` - * - * @param alphabet Alphabet used to generate a random string. - * @param size Size of the random string. - * @param random A random bytes generator. - * @returns A random string generator. - */ -export function customRandom( - alphabet: string, - size: number, - random: (bytes: number) => Uint8Array -): () => string - -/** - * URL safe symbols. - * - * ```js - * import { urlAlphabet } from 'nanoid' - * const nanoid = customAlphabet(urlAlphabet, 10) - * nanoid() //=> "Uakgb_J5m9" - * ``` - */ -export const urlAlphabet: string - -/** - * Generate an array of random bytes collected from hardware noise. - * - * ```js - * import { customRandom, random } from 'nanoid' - * const nanoid = customRandom("abcdef", 5, random) - * ``` - * - * @param bytes Size of the array. - * @returns An array of random bytes. - */ -export function random(bytes: number): Uint8Array diff --git a/node_modules/nanoid/index.js b/node_modules/nanoid/index.js deleted file mode 100644 index 5203a4c..0000000 --- a/node_modules/nanoid/index.js +++ /dev/null @@ -1,85 +0,0 @@ -import crypto from 'crypto' - -import { urlAlphabet } from './url-alphabet/index.js' - -// It is best to make fewer, larger requests to the crypto module to -// avoid system call overhead. So, random numbers are generated in a -// pool. The pool is a Buffer that is larger than the initial random -// request size by this multiplier. 
The pool is enlarged if subsequent -// requests exceed the maximum buffer size. -const POOL_SIZE_MULTIPLIER = 128 -let pool, poolOffset - -let fillPool = bytes => { - if (!pool || pool.length < bytes) { - pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER) - crypto.randomFillSync(pool) - poolOffset = 0 - } else if (poolOffset + bytes > pool.length) { - crypto.randomFillSync(pool) - poolOffset = 0 - } - poolOffset += bytes -} - -let random = bytes => { - // `|=` convert `bytes` to number to prevent `valueOf` abusing and pool pollution - fillPool((bytes |= 0)) - return pool.subarray(poolOffset - bytes, poolOffset) -} - -let customRandom = (alphabet, defaultSize, getRandom) => { - // First, a bitmask is necessary to generate the ID. The bitmask makes bytes - // values closer to the alphabet size. The bitmask calculates the closest - // `2^31 - 1` number, which exceeds the alphabet size. - // For example, the bitmask for the alphabet size 30 is 31 (00011111). - let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 - // Though, the bitmask solution is not perfect since the bytes exceeding - // the alphabet size are refused. Therefore, to reliably generate the ID, - // the random bytes redundancy has to be satisfied. - - // Note: every hardware random generator call is performance expensive, - // because the system call for entropy collection takes a lot of time. - // So, to avoid additional system calls, extra bytes are requested in advance. - - // Next, a step determines how many random bytes to generate. - // The number of random bytes gets decided upon the ID size, mask, - // alphabet size, and magic number 1.6 (using 1.6 peaks at performance - // according to benchmarks). - let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) - - return (size = defaultSize) => { - let id = '' - while (true) { - let bytes = getRandom(step) - // A compact alternative for `for (let i = 0; i < step; i++)`. - let i = step - while (i--) { - // Adding `|| ''` refuses a random byte that exceeds the alphabet size. - id += alphabet[bytes[i] & mask] || '' - if (id.length === size) return id - } - } - } -} - -let customAlphabet = (alphabet, size = 21) => - customRandom(alphabet, size, random) - -let nanoid = (size = 21) => { - // `|=` convert `size` to number to prevent `valueOf` abusing and pool pollution - fillPool((size |= 0)) - let id = '' - // We are reading directly from the random pool to avoid creating new array - for (let i = poolOffset - size; i < poolOffset; i++) { - // It is incorrect to use bytes exceeding the alphabet size. - // The following mask reduces the random byte in the 0-255 value - // range to the 0-63 value range. Therefore, adding hacks, such - // as empty string fallback or magic numbers, is unneccessary because - // the bitmask trims bytes down to the alphabet size. 
- id += urlAlphabet[pool[i] & 63] - } - return id -} - -export { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/nanoid.js b/node_modules/nanoid/nanoid.js deleted file mode 100644 index ec242ea..0000000 --- a/node_modules/nanoid/nanoid.js +++ /dev/null @@ -1 +0,0 @@ -export let nanoid=(t=21)=>crypto.getRandomValues(new Uint8Array(t)).reduce(((t,e)=>t+=(e&=63)<36?e.toString(36):e<62?(e-26).toString(36).toUpperCase():e<63?"_":"-"),""); \ No newline at end of file diff --git a/node_modules/nanoid/non-secure/index.cjs b/node_modules/nanoid/non-secure/index.cjs deleted file mode 100644 index d51fcb6..0000000 --- a/node_modules/nanoid/non-secure/index.cjs +++ /dev/null @@ -1,34 +0,0 @@ -// This alphabet uses `A-Za-z0-9_-` symbols. -// The order of characters is optimized for better gzip and brotli compression. -// References to the same file (works both for gzip and brotli): -// `'use`, `andom`, and `rict'` -// References to the brotli default dictionary: -// `-26T`, `1983`, `40px`, `75px`, `bush`, `jack`, `mind`, `very`, and `wolf` -let urlAlphabet = - 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' - -let customAlphabet = (alphabet, defaultSize = 21) => { - return (size = defaultSize) => { - let id = '' - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = size | 0 - while (i--) { - // `| 0` is more compact and faster than `Math.floor()`. - id += alphabet[(Math.random() * alphabet.length) | 0] - } - return id - } -} - -let nanoid = (size = 21) => { - let id = '' - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = size | 0 - while (i--) { - // `| 0` is more compact and faster than `Math.floor()`. - id += urlAlphabet[(Math.random() * 64) | 0] - } - return id -} - -module.exports = { nanoid, customAlphabet } diff --git a/node_modules/nanoid/non-secure/index.d.ts b/node_modules/nanoid/non-secure/index.d.ts deleted file mode 100644 index 4965322..0000000 --- a/node_modules/nanoid/non-secure/index.d.ts +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Generate URL-friendly unique ID. This method uses the non-secure - * predictable random generator with bigger collision probability. - * - * ```js - * import { nanoid } from 'nanoid/non-secure' - * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" - * ``` - * - * @param size Size of the ID. The default size is 21. - * @returns A random string. - */ -export function nanoid(size?: number): string - -/** - * Generate a unique ID based on a custom alphabet. - * This method uses the non-secure predictable random generator - * with bigger collision probability. - * - * @param alphabet Alphabet used to generate the ID. - * @param defaultSize Size of the ID. The default size is 21. - * @returns A random string generator. - * - * ```js - * import { customAlphabet } from 'nanoid/non-secure' - * const nanoid = customAlphabet('0123456789абвгдеё', 5) - * model.id = //=> "8ё56а" - * ``` - */ -export function customAlphabet( - alphabet: string, - defaultSize?: number -): (size?: number) => string diff --git a/node_modules/nanoid/non-secure/index.js b/node_modules/nanoid/non-secure/index.js deleted file mode 100644 index fcb3e25..0000000 --- a/node_modules/nanoid/non-secure/index.js +++ /dev/null @@ -1,34 +0,0 @@ -// This alphabet uses `A-Za-z0-9_-` symbols. -// The order of characters is optimized for better gzip and brotli compression. 
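For context on the arithmetic in the deleted `customRandom` above, here is a minimal illustrative sketch of how the bitmask and the oversampling step relate to the alphabet size. The 10-character alphabet below is hypothetical and not part of this patch; the sketch only mirrors the mask/step math quoted in the removed nanoid source.

```js
// Sketch only: reproduces the mask/step arithmetic from the removed nanoid index.js.
const alphabet = '0123456789' // hypothetical 10-symbol alphabet

// Smallest bitmask of the form 2^n - 1 that covers every alphabet index;
// for 10 symbols this is 15 (binary 1111).
const mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1

// Masked bytes that land outside the alphabet are discarded, so roughly
// 1.6x more random bytes than the ID length are requested per attempt.
const defaultSize = 21
const step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length)

console.log({ mask, step }) //=> { mask: 15, step: 51 }
```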
-// References to the same file (works both for gzip and brotli): -// `'use`, `andom`, and `rict'` -// References to the brotli default dictionary: -// `-26T`, `1983`, `40px`, `75px`, `bush`, `jack`, `mind`, `very`, and `wolf` -let urlAlphabet = - 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' - -let customAlphabet = (alphabet, defaultSize = 21) => { - return (size = defaultSize) => { - let id = '' - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = size | 0 - while (i--) { - // `| 0` is more compact and faster than `Math.floor()`. - id += alphabet[(Math.random() * alphabet.length) | 0] - } - return id - } -} - -let nanoid = (size = 21) => { - let id = '' - // A compact alternative for `for (var i = 0; i < step; i++)`. - let i = size | 0 - while (i--) { - // `| 0` is more compact and faster than `Math.floor()`. - id += urlAlphabet[(Math.random() * 64) | 0] - } - return id -} - -export { nanoid, customAlphabet } diff --git a/node_modules/nanoid/non-secure/package.json b/node_modules/nanoid/non-secure/package.json deleted file mode 100644 index 9930d6a..0000000 --- a/node_modules/nanoid/non-secure/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "type": "module", - "main": "index.cjs", - "module": "index.js", - "react-native": "index.js" -} \ No newline at end of file diff --git a/node_modules/nanoid/package.json b/node_modules/nanoid/package.json deleted file mode 100644 index b238dca..0000000 --- a/node_modules/nanoid/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "name": "nanoid", - "version": "3.3.8", - "description": "A tiny (116 bytes), secure URL-friendly unique string ID generator", - "keywords": [ - "uuid", - "random", - "id", - "url" - ], - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "author": "Andrey Sitnik ", - "license": "MIT", - "repository": "ai/nanoid", - "browser": { - "./index.js": "./index.browser.js", - "./async/index.js": "./async/index.browser.js", - "./async/index.cjs": "./async/index.browser.cjs", - "./index.cjs": "./index.browser.cjs" - }, - "react-native": "index.js", - "bin": "./bin/nanoid.cjs", - "sideEffects": false, - "types": "./index.d.ts", - "type": "module", - "main": "index.cjs", - "module": "index.js", - "exports": { - ".": { - "browser": "./index.browser.js", - "require": { - "types": "./index.d.cts", - "default": "./index.cjs" - }, - "import": { - "types": "./index.d.ts", - "default": "./index.js" - }, - "default": "./index.js" - }, - "./package.json": "./package.json", - "./async/package.json": "./async/package.json", - "./async": { - "browser": "./async/index.browser.js", - "require": { - "types": "./index.d.cts", - "default": "./async/index.cjs" - }, - "import": { - "types": "./index.d.ts", - "default": "./async/index.js" - }, - "default": "./async/index.js" - }, - "./non-secure/package.json": "./non-secure/package.json", - "./non-secure": { - "require": { - "types": "./index.d.cts", - "default": "./non-secure/index.cjs" - }, - "import": { - "types": "./index.d.ts", - "default": "./non-secure/index.js" - }, - "default": "./non-secure/index.js" - }, - "./url-alphabet/package.json": "./url-alphabet/package.json", - "./url-alphabet": { - "require": { - "types": "./index.d.cts", - "default": "./url-alphabet/index.cjs" - }, - "import": { - "types": "./index.d.ts", - "default": "./url-alphabet/index.js" - }, - "default": "./url-alphabet/index.js" - } - } -} \ No newline at end of file diff --git 
a/node_modules/nanoid/url-alphabet/index.cjs b/node_modules/nanoid/url-alphabet/index.cjs deleted file mode 100644 index a332f0b..0000000 --- a/node_modules/nanoid/url-alphabet/index.cjs +++ /dev/null @@ -1,7 +0,0 @@ -// This alphabet uses `A-Za-z0-9_-` symbols. -// The order of characters is optimized for better gzip and brotli compression. -// Same as in non-secure/index.js -let urlAlphabet = - 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' - -module.exports = { urlAlphabet } diff --git a/node_modules/nanoid/url-alphabet/index.js b/node_modules/nanoid/url-alphabet/index.js deleted file mode 100644 index 27efec8..0000000 --- a/node_modules/nanoid/url-alphabet/index.js +++ /dev/null @@ -1,7 +0,0 @@ -// This alphabet uses `A-Za-z0-9_-` symbols. -// The order of characters is optimized for better gzip and brotli compression. -// Same as in non-secure/index.js -let urlAlphabet = - 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' - -export { urlAlphabet } diff --git a/node_modules/nanoid/url-alphabet/package.json b/node_modules/nanoid/url-alphabet/package.json deleted file mode 100644 index 9930d6a..0000000 --- a/node_modules/nanoid/url-alphabet/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "type": "module", - "main": "index.cjs", - "module": "index.js", - "react-native": "index.js" -} \ No newline at end of file diff --git a/node_modules/normalize-path/LICENSE b/node_modules/normalize-path/LICENSE deleted file mode 100644 index d32ab44..0000000 --- a/node_modules/normalize-path/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2018, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/normalize-path/README.md b/node_modules/normalize-path/README.md deleted file mode 100644 index 726d4d6..0000000 --- a/node_modules/normalize-path/README.md +++ /dev/null @@ -1,127 +0,0 @@ -# normalize-path [![NPM version](https://img.shields.io/npm/v/normalize-path.svg?style=flat)](https://www.npmjs.com/package/normalize-path) [![NPM monthly downloads](https://img.shields.io/npm/dm/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![NPM total downloads](https://img.shields.io/npm/dt/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/normalize-path.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/normalize-path) - -> Normalize slashes in a file path to be posix/unix-like forward slashes. 
Also condenses repeat slashes to a single slash and removes and trailing slashes, unless disabled. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save normalize-path -``` - -## Usage - -```js -const normalize = require('normalize-path'); - -console.log(normalize('\\foo\\bar\\baz\\')); -//=> '/foo/bar/baz' -``` - -**win32 namespaces** - -```js -console.log(normalize('\\\\?\\UNC\\Server01\\user\\docs\\Letter.txt')); -//=> '//?/UNC/Server01/user/docs/Letter.txt' - -console.log(normalize('\\\\.\\CdRomX')); -//=> '//./CdRomX' -``` - -**Consecutive slashes** - -Condenses multiple consecutive forward slashes (except for leading slashes in win32 namespaces) to a single slash. - -```js -console.log(normalize('.//foo//bar///////baz/')); -//=> './foo/bar/baz' -``` - -### Trailing slashes - -By default trailing slashes are removed. Pass `false` as the last argument to disable this behavior and _**keep** trailing slashes_: - -```js -console.log(normalize('foo\\bar\\baz\\', false)); //=> 'foo/bar/baz/' -console.log(normalize('./foo/bar/baz/', false)); //=> './foo/bar/baz/' -``` - -## Release history - -### v3.0 - -No breaking changes in this release. - -* a check was added to ensure that [win32 namespaces](https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces) are handled properly by win32 `path.parse()` after a path has been normalized by this library. -* a minor optimization was made to simplify how the trailing separator was handled - -## About - -
    -Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
    - -
    -Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
    - -
    -Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
    - -### Related projects - -Other useful path-related libraries: - -* [contains-path](https://www.npmjs.com/package/contains-path): Return true if a file path contains the given path. | [homepage](https://github.com/jonschlinkert/contains-path "Return true if a file path contains the given path.") -* [is-absolute](https://www.npmjs.com/package/is-absolute): Returns true if a file path is absolute. Does not rely on the path module… [more](https://github.com/jonschlinkert/is-absolute) | [homepage](https://github.com/jonschlinkert/is-absolute "Returns true if a file path is absolute. Does not rely on the path module and can be used as a polyfill for node.js native `path.isAbolute`.") -* [is-relative](https://www.npmjs.com/package/is-relative): Returns `true` if the path appears to be relative. | [homepage](https://github.com/jonschlinkert/is-relative "Returns `true` if the path appears to be relative.") -* [parse-filepath](https://www.npmjs.com/package/parse-filepath): Pollyfill for node.js `path.parse`, parses a filepath into an object. | [homepage](https://github.com/jonschlinkert/parse-filepath "Pollyfill for node.js `path.parse`, parses a filepath into an object.") -* [path-ends-with](https://www.npmjs.com/package/path-ends-with): Return `true` if a file path ends with the given string/suffix. | [homepage](https://github.com/jonschlinkert/path-ends-with "Return `true` if a file path ends with the given string/suffix.") -* [unixify](https://www.npmjs.com/package/unixify): Convert Windows file paths to unix paths. | [homepage](https://github.com/jonschlinkert/unixify "Convert Windows file paths to unix paths.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 35 | [jonschlinkert](https://github.com/jonschlinkert) | -| 1 | [phated](https://github.com/phated) | - -### Author - -**Jon Schlinkert** - -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) - -### License - -Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on April 19, 2018._ \ No newline at end of file diff --git a/node_modules/normalize-path/index.js b/node_modules/normalize-path/index.js deleted file mode 100644 index 6fac553..0000000 --- a/node_modules/normalize-path/index.js +++ /dev/null @@ -1,35 +0,0 @@ -/*! - * normalize-path - * - * Copyright (c) 2014-2018, Jon Schlinkert. - * Released under the MIT License. - */ - -module.exports = function(path, stripTrailing) { - if (typeof path !== 'string') { - throw new TypeError('expected path to be a string'); - } - - if (path === '\\' || path === '/') return '/'; - - var len = path.length; - if (len <= 1) return path; - - // ensure that win32 namespaces has two leading slashes, so that the path is - // handled properly by the win32 version of path.parse() after being normalized - // https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces - var prefix = ''; - if (len > 4 && path[3] === '\\') { - var ch = path[2]; - if ((ch === '?' 
|| ch === '.') && path.slice(0, 2) === '\\\\') { - path = path.slice(2); - prefix = '//'; - } - } - - var segs = path.split(/[/\\]+/); - if (stripTrailing !== false && segs[segs.length - 1] === '') { - segs.pop(); - } - return prefix + segs.join('/'); -}; diff --git a/node_modules/normalize-path/package.json b/node_modules/normalize-path/package.json deleted file mode 100644 index ad61098..0000000 --- a/node_modules/normalize-path/package.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "name": "normalize-path", - "description": "Normalize slashes in a file path to be posix/unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes, unless disabled.", - "version": "3.0.0", - "homepage": "https://github.com/jonschlinkert/normalize-path", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Blaine Bublitz (https://twitter.com/BlaineBublitz)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)" - ], - "repository": "jonschlinkert/normalize-path", - "bugs": { - "url": "https://github.com/jonschlinkert/normalize-path/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "mocha" - }, - "devDependencies": { - "gulp-format-md": "^1.0.0", - "minimist": "^1.2.0", - "mocha": "^3.5.3" - }, - "keywords": [ - "absolute", - "backslash", - "delimiter", - "file", - "file-path", - "filepath", - "fix", - "forward", - "fp", - "fs", - "normalize", - "path", - "relative", - "separator", - "slash", - "slashes", - "trailing", - "unix", - "urix" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "related": { - "description": "Other useful path-related libraries:", - "list": [ - "contains-path", - "is-absolute", - "is-relative", - "parse-filepath", - "path-ends-with", - "path-ends-with", - "unixify" - ] - }, - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/path-type/index.d.ts b/node_modules/path-type/index.d.ts deleted file mode 100644 index fa742ad..0000000 --- a/node_modules/path-type/index.d.ts +++ /dev/null @@ -1,51 +0,0 @@ -export type PathTypeFunction = (path: string) => Promise; - -/** -Check whether the passed `path` is a file. - -@param path - The path to check. -@returns Whether the `path` is a file. -*/ -export const isFile: PathTypeFunction; - -/** -Check whether the passed `path` is a directory. - -@param path - The path to check. -@returns Whether the `path` is a directory. -*/ -export const isDirectory: PathTypeFunction; - -/** -Check whether the passed `path` is a symlink. - -@param path - The path to check. -@returns Whether the `path` is a symlink. -*/ -export const isSymlink: PathTypeFunction; - -export type PathTypeSyncFunction = (path: string) => boolean; - -/** -Synchronously check whether the passed `path` is a file. - -@param path - The path to check. -@returns Whether the `path` is a file. -*/ -export const isFileSync: PathTypeSyncFunction; - -/** -Synchronously check whether the passed `path` is a directory. - -@param path - The path to check. -@returns Whether the `path` is a directory. -*/ -export const isDirectorySync: PathTypeSyncFunction; - -/** -Synchronously check whether the passed `path` is a symlink. - -@param path - The path to check. -@returns Whether the `path` is a directory. 
-*/ -export const isSymlinkSync: PathTypeSyncFunction; diff --git a/node_modules/path-type/index.js b/node_modules/path-type/index.js deleted file mode 100644 index b43688d..0000000 --- a/node_modules/path-type/index.js +++ /dev/null @@ -1,41 +0,0 @@ -import fs, {promises as fsPromises} from 'fs'; - -async function isType(fsStatType, statsMethodName, filePath) { - if (typeof filePath !== 'string') { - throw new TypeError(`Expected a string, got ${typeof filePath}`); - } - - try { - const stats = await fsPromises[fsStatType](filePath); - return stats[statsMethodName](); - } catch (error) { - if (error.code === 'ENOENT') { - return false; - } - - throw error; - } -} - -function isTypeSync(fsStatType, statsMethodName, filePath) { - if (typeof filePath !== 'string') { - throw new TypeError(`Expected a string, got ${typeof filePath}`); - } - - try { - return fs[fsStatType](filePath)[statsMethodName](); - } catch (error) { - if (error.code === 'ENOENT') { - return false; - } - - throw error; - } -} - -export const isFile = isType.bind(null, 'stat', 'isFile'); -export const isDirectory = isType.bind(null, 'stat', 'isDirectory'); -export const isSymlink = isType.bind(null, 'lstat', 'isSymbolicLink'); -export const isFileSync = isTypeSync.bind(null, 'statSync', 'isFile'); -export const isDirectorySync = isTypeSync.bind(null, 'statSync', 'isDirectory'); -export const isSymlinkSync = isTypeSync.bind(null, 'lstatSync', 'isSymbolicLink'); diff --git a/node_modules/path-type/license b/node_modules/path-type/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/path-type/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
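As a usage note for the path-type helpers deleted above, here is a short hypothetical sketch of the ENOENT handling implemented in the removed `index.js`: missing paths yield `false`, while any other filesystem error is re-thrown. The file names are made up.

```js
import {isFile, isDirectorySync} from 'path-type';

// A nonexistent path resolves (or returns) false instead of throwing,
// because the removed index.js swallows ENOENT specifically.
console.log(await isFile('no-such-file.txt'));   //=> false
console.log(isDirectorySync('no-such-dir'));     //=> false

// Any other error (for example EACCES) propagates to the caller unchanged.
```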
diff --git a/node_modules/path-type/package.json b/node_modules/path-type/package.json deleted file mode 100644 index 15b59fd..0000000 --- a/node_modules/path-type/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "path-type", - "version": "5.0.0", - "description": "Check if a path is a file, directory, or symlink", - "license": "MIT", - "repository": "sindresorhus/path-type", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "type": "module", - "exports": "./index.js", - "engines": { - "node": ">=12" - }, - "scripts": { - "test": "xo && nyc ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "path", - "fs", - "type", - "is", - "check", - "directory", - "file", - "filepath", - "symlink", - "symbolic", - "link", - "stat", - "stats", - "filesystem" - ], - "devDependencies": { - "ava": "^3.15.0", - "nyc": "^15.1.0", - "tsd": "^0.14.0", - "xo": "^0.37.1" - } -} diff --git a/node_modules/path-type/readme.md b/node_modules/path-type/readme.md deleted file mode 100644 index 85c5129..0000000 --- a/node_modules/path-type/readme.md +++ /dev/null @@ -1,74 +0,0 @@ -# path-type - -> Check if a path is a file, directory, or symlink - -## Install - -``` -$ npm install path-type -``` - -## Usage - -```js -import {isFile} from 'path-type'; - -console.log(await isFile('package.json')); -//=> true -``` - -## API - -### isFile(path) - -Check whether the passed `path` is a file. - -Returns a `Promise`. - -#### path - -Type: `string` - -The path to check. - -### isDirectory(path) - -Check whether the passed `path` is a directory. - -Returns a `Promise`. - -### isSymlink(path) - -Check whether the passed `path` is a symlink. - -Returns a `Promise`. - -### isFileSync(path) - -Synchronously check whether the passed `path` is a file. - -Returns a `boolean`. - -### isDirectorySync(path) - -Synchronously check whether the passed `path` is a directory. - -Returns a `boolean`. - -### isSymlinkSync(path) - -Synchronously check whether the passed `path` is a symlink. - -Returns a `boolean`. - ---- - -
    diff --git a/node_modules/picocolors/LICENSE b/node_modules/picocolors/LICENSE deleted file mode 100644 index 46c9b95..0000000 --- a/node_modules/picocolors/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -ISC License - -Copyright (c) 2021-2024 Oleksii Raspopov, Kostiantyn Denysov, Anton Verinov - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/picocolors/README.md b/node_modules/picocolors/README.md deleted file mode 100644 index 8e47aa8..0000000 --- a/node_modules/picocolors/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# picocolors - -The tiniest and the fastest library for terminal output formatting with ANSI colors. - -```javascript -import pc from "picocolors" - -console.log( - pc.green(`How are ${pc.italic(`you`)} doing?`) -) -``` - -- **No dependencies.** -- **14 times** smaller and **2 times** faster than chalk. -- Used by popular tools like PostCSS, SVGO, Stylelint, and Browserslist. -- Node.js v6+ & browsers support. Support for both CJS and ESM projects. -- TypeScript type declarations included. -- [`NO_COLOR`](https://no-color.org/) friendly. - -## Docs -Read **[full docs](https://github.com/alexeyraspopov/picocolors#readme)** on GitHub. 
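A brief illustrative sketch of composing the picocolors API documented in the deleted README above. The strings are arbitrary, and formatting silently degrades to plain pass-through when `NO_COLOR` is set or the stream is not a color-capable terminal.

```js
import pc from "picocolors"

// Formatter functions nest like ordinary string functions.
const badge = pc.bgYellow(pc.black(pc.bold(" WARN ")))
console.log(`${badge} ${pc.yellow("this API is deprecated")}`)

// Reports whether escape codes will actually be emitted for this process.
console.log(pc.isColorSupported)
```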
diff --git a/node_modules/picocolors/package.json b/node_modules/picocolors/package.json deleted file mode 100644 index 372d4b6..0000000 --- a/node_modules/picocolors/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "picocolors", - "version": "1.1.1", - "main": "./picocolors.js", - "types": "./picocolors.d.ts", - "browser": { - "./picocolors.js": "./picocolors.browser.js" - }, - "sideEffects": false, - "description": "The tiniest and the fastest library for terminal output formatting with ANSI colors", - "files": [ - "picocolors.*", - "types.d.ts" - ], - "keywords": [ - "terminal", - "colors", - "formatting", - "cli", - "console" - ], - "author": "Alexey Raspopov", - "repository": "alexeyraspopov/picocolors", - "license": "ISC" -} diff --git a/node_modules/picocolors/picocolors.browser.js b/node_modules/picocolors/picocolors.browser.js deleted file mode 100644 index 9dcf637..0000000 --- a/node_modules/picocolors/picocolors.browser.js +++ /dev/null @@ -1,4 +0,0 @@ -var x=String; -var create=function() {return {isColorSupported:false,reset:x,bold:x,dim:x,italic:x,underline:x,inverse:x,hidden:x,strikethrough:x,black:x,red:x,green:x,yellow:x,blue:x,magenta:x,cyan:x,white:x,gray:x,bgBlack:x,bgRed:x,bgGreen:x,bgYellow:x,bgBlue:x,bgMagenta:x,bgCyan:x,bgWhite:x,blackBright:x,redBright:x,greenBright:x,yellowBright:x,blueBright:x,magentaBright:x,cyanBright:x,whiteBright:x,bgBlackBright:x,bgRedBright:x,bgGreenBright:x,bgYellowBright:x,bgBlueBright:x,bgMagentaBright:x,bgCyanBright:x,bgWhiteBright:x}}; -module.exports=create(); -module.exports.createColors = create; diff --git a/node_modules/picocolors/picocolors.d.ts b/node_modules/picocolors/picocolors.d.ts deleted file mode 100644 index 94e146a..0000000 --- a/node_modules/picocolors/picocolors.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { Colors } from "./types" - -declare const picocolors: Colors & { createColors: (enabled?: boolean) => Colors } - -export = picocolors diff --git a/node_modules/picocolors/picocolors.js b/node_modules/picocolors/picocolors.js deleted file mode 100644 index e32df85..0000000 --- a/node_modules/picocolors/picocolors.js +++ /dev/null @@ -1,75 +0,0 @@ -let p = process || {}, argv = p.argv || [], env = p.env || {} -let isColorSupported = - !(!!env.NO_COLOR || argv.includes("--no-color")) && - (!!env.FORCE_COLOR || argv.includes("--color") || p.platform === "win32" || ((p.stdout || {}).isTTY && env.TERM !== "dumb") || !!env.CI) - -let formatter = (open, close, replace = open) => - input => { - let string = "" + input, index = string.indexOf(close, open.length) - return ~index ? open + replaceClose(string, close, replace, index) + close : open + string + close - } - -let replaceClose = (string, close, replace, index) => { - let result = "", cursor = 0 - do { - result += string.substring(cursor, index) + replace - cursor = index + close.length - index = string.indexOf(close, cursor) - } while (~index) - return result + string.substring(cursor) -} - -let createColors = (enabled = isColorSupported) => { - let f = enabled ? 
formatter : () => String - return { - isColorSupported: enabled, - reset: f("\x1b[0m", "\x1b[0m"), - bold: f("\x1b[1m", "\x1b[22m", "\x1b[22m\x1b[1m"), - dim: f("\x1b[2m", "\x1b[22m", "\x1b[22m\x1b[2m"), - italic: f("\x1b[3m", "\x1b[23m"), - underline: f("\x1b[4m", "\x1b[24m"), - inverse: f("\x1b[7m", "\x1b[27m"), - hidden: f("\x1b[8m", "\x1b[28m"), - strikethrough: f("\x1b[9m", "\x1b[29m"), - - black: f("\x1b[30m", "\x1b[39m"), - red: f("\x1b[31m", "\x1b[39m"), - green: f("\x1b[32m", "\x1b[39m"), - yellow: f("\x1b[33m", "\x1b[39m"), - blue: f("\x1b[34m", "\x1b[39m"), - magenta: f("\x1b[35m", "\x1b[39m"), - cyan: f("\x1b[36m", "\x1b[39m"), - white: f("\x1b[37m", "\x1b[39m"), - gray: f("\x1b[90m", "\x1b[39m"), - - bgBlack: f("\x1b[40m", "\x1b[49m"), - bgRed: f("\x1b[41m", "\x1b[49m"), - bgGreen: f("\x1b[42m", "\x1b[49m"), - bgYellow: f("\x1b[43m", "\x1b[49m"), - bgBlue: f("\x1b[44m", "\x1b[49m"), - bgMagenta: f("\x1b[45m", "\x1b[49m"), - bgCyan: f("\x1b[46m", "\x1b[49m"), - bgWhite: f("\x1b[47m", "\x1b[49m"), - - blackBright: f("\x1b[90m", "\x1b[39m"), - redBright: f("\x1b[91m", "\x1b[39m"), - greenBright: f("\x1b[92m", "\x1b[39m"), - yellowBright: f("\x1b[93m", "\x1b[39m"), - blueBright: f("\x1b[94m", "\x1b[39m"), - magentaBright: f("\x1b[95m", "\x1b[39m"), - cyanBright: f("\x1b[96m", "\x1b[39m"), - whiteBright: f("\x1b[97m", "\x1b[39m"), - - bgBlackBright: f("\x1b[100m", "\x1b[49m"), - bgRedBright: f("\x1b[101m", "\x1b[49m"), - bgGreenBright: f("\x1b[102m", "\x1b[49m"), - bgYellowBright: f("\x1b[103m", "\x1b[49m"), - bgBlueBright: f("\x1b[104m", "\x1b[49m"), - bgMagentaBright: f("\x1b[105m", "\x1b[49m"), - bgCyanBright: f("\x1b[106m", "\x1b[49m"), - bgWhiteBright: f("\x1b[107m", "\x1b[49m"), - } -} - -module.exports = createColors() -module.exports.createColors = createColors diff --git a/node_modules/picocolors/types.d.ts b/node_modules/picocolors/types.d.ts deleted file mode 100644 index cd1aec4..0000000 --- a/node_modules/picocolors/types.d.ts +++ /dev/null @@ -1,51 +0,0 @@ -export type Formatter = (input: string | number | null | undefined) => string - -export interface Colors { - isColorSupported: boolean - - reset: Formatter - bold: Formatter - dim: Formatter - italic: Formatter - underline: Formatter - inverse: Formatter - hidden: Formatter - strikethrough: Formatter - - black: Formatter - red: Formatter - green: Formatter - yellow: Formatter - blue: Formatter - magenta: Formatter - cyan: Formatter - white: Formatter - gray: Formatter - - bgBlack: Formatter - bgRed: Formatter - bgGreen: Formatter - bgYellow: Formatter - bgBlue: Formatter - bgMagenta: Formatter - bgCyan: Formatter - bgWhite: Formatter - - blackBright: Formatter - redBright: Formatter - greenBright: Formatter - yellowBright: Formatter - blueBright: Formatter - magentaBright: Formatter - cyanBright: Formatter - whiteBright: Formatter - - bgBlackBright: Formatter - bgRedBright: Formatter - bgGreenBright: Formatter - bgYellowBright: Formatter - bgBlueBright: Formatter - bgMagentaBright: Formatter - bgCyanBright: Formatter - bgWhiteBright: Formatter -} diff --git a/node_modules/picomatch/CHANGELOG.md b/node_modules/picomatch/CHANGELOG.md deleted file mode 100644 index 8ccc6c1..0000000 --- a/node_modules/picomatch/CHANGELOG.md +++ /dev/null @@ -1,136 +0,0 @@ -# Release history - -**All notable changes to this project will be documented in this file.** - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -
    - Guiding Principles - -- Changelogs are for humans, not machines. -- There should be an entry for every single version. -- The same types of changes should be grouped. -- Versions and sections should be linkable. -- The latest version comes first. -- The release date of each version is displayed. -- Mention whether you follow Semantic Versioning. - -
    - -
    - Types of changes - -Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): - -- `Added` for new features. -- `Changed` for changes in existing functionality. -- `Deprecated` for soon-to-be removed features. -- `Removed` for now removed features. -- `Fixed` for any bug fixes. -- `Security` in case of vulnerabilities. - -
    - -## 2.3.1 (2022-01-02) - -### Fixed - -* Fixes bug when a pattern containing an expression after the closing parenthesis (`/!(*.d).{ts,tsx}`) was incorrectly converted to regexp ([9f241ef](https://github.com/micromatch/picomatch/commit/9f241ef)). - -### Changed - -* Some documentation improvements ([f81d236](https://github.com/micromatch/picomatch/commit/f81d236), [421e0e7](https://github.com/micromatch/picomatch/commit/421e0e7)). - -## 2.3.0 (2021-05-21) - -### Fixed - -* Fixes bug where file names with two dots were not being matched consistently with negation extglobs containing a star ([56083ef](https://github.com/micromatch/picomatch/commit/56083ef)) - -## 2.2.3 (2021-04-10) - -### Fixed - -* Do not skip pattern seperator for square brackets ([fb08a30](https://github.com/micromatch/picomatch/commit/fb08a30)). -* Set negatedExtGlob also if it does not span the whole pattern ([032e3f5](https://github.com/micromatch/picomatch/commit/032e3f5)). - -## 2.2.2 (2020-03-21) - -### Fixed - -* Correctly handle parts of the pattern after parentheses in the `scan` method ([e15b920](https://github.com/micromatch/picomatch/commit/e15b920)). - -## 2.2.1 (2020-01-04) - -* Fixes [#49](https://github.com/micromatch/picomatch/issues/49), so that braces with no sets or ranges are now propertly treated as literals. - -## 2.2.0 (2020-01-04) - -* Disable fastpaths mode for the parse method ([5b8d33f](https://github.com/micromatch/picomatch/commit/5b8d33f)) -* Add `tokens`, `slashes`, and `parts` to the object returned by `picomatch.scan()`. - -## 2.1.0 (2019-10-31) - -* add benchmarks for scan ([4793b92](https://github.com/micromatch/picomatch/commit/4793b92)) -* Add eslint object-curly-spacing rule ([707c650](https://github.com/micromatch/picomatch/commit/707c650)) -* Add prefer-const eslint rule ([5c7501c](https://github.com/micromatch/picomatch/commit/5c7501c)) -* Add support for nonegate in scan API ([275c9b9](https://github.com/micromatch/picomatch/commit/275c9b9)) -* Change lets to consts. Move root import up. ([4840625](https://github.com/micromatch/picomatch/commit/4840625)) -* closes https://github.com/micromatch/picomatch/issues/21 ([766bcb0](https://github.com/micromatch/picomatch/commit/766bcb0)) -* Fix "Extglobs" table in readme ([eb19da8](https://github.com/micromatch/picomatch/commit/eb19da8)) -* fixes https://github.com/micromatch/picomatch/issues/20 ([9caca07](https://github.com/micromatch/picomatch/commit/9caca07)) -* fixes https://github.com/micromatch/picomatch/issues/26 ([fa58f45](https://github.com/micromatch/picomatch/commit/fa58f45)) -* Lint test ([d433a34](https://github.com/micromatch/picomatch/commit/d433a34)) -* lint unit tests ([0159b55](https://github.com/micromatch/picomatch/commit/0159b55)) -* Make scan work with noext ([6c02e03](https://github.com/micromatch/picomatch/commit/6c02e03)) -* minor linting ([c2a2b87](https://github.com/micromatch/picomatch/commit/c2a2b87)) -* minor parser improvements ([197671d](https://github.com/micromatch/picomatch/commit/197671d)) -* remove eslint since it... 
([07876fa](https://github.com/micromatch/picomatch/commit/07876fa)) -* remove funding file ([8ebe96d](https://github.com/micromatch/picomatch/commit/8ebe96d)) -* Remove unused funks ([cbc6d54](https://github.com/micromatch/picomatch/commit/cbc6d54)) -* Run eslint during pretest, fix existing eslint findings ([0682367](https://github.com/micromatch/picomatch/commit/0682367)) -* support `noparen` in scan ([3d37569](https://github.com/micromatch/picomatch/commit/3d37569)) -* update changelog ([7b34e77](https://github.com/micromatch/picomatch/commit/7b34e77)) -* update travis ([777f038](https://github.com/micromatch/picomatch/commit/777f038)) -* Use eslint-disable-next-line instead of eslint-disable ([4e7c1fd](https://github.com/micromatch/picomatch/commit/4e7c1fd)) - -## 2.0.7 (2019-05-14) - -* 2.0.7 ([9eb9a71](https://github.com/micromatch/picomatch/commit/9eb9a71)) -* supports lookbehinds ([1f63f7e](https://github.com/micromatch/picomatch/commit/1f63f7e)) -* update .verb.md file with typo change ([2741279](https://github.com/micromatch/picomatch/commit/2741279)) -* fix: typo in README ([0753e44](https://github.com/micromatch/picomatch/commit/0753e44)) - -## 2.0.4 (2019-04-10) - -### Fixed - -- Readme link [fixed](https://github.com/micromatch/picomatch/pull/13/commits/a96ab3aa2b11b6861c23289964613d85563b05df) by @danez. -- `options.capture` now works as expected when fastpaths are enabled. See https://github.com/micromatch/picomatch/pull/12/commits/26aefd71f1cfaf95c37f1c1fcab68a693b037304. Thanks to @DrPizza. - -## 2.0.0 (2019-04-10) - -### Added - -- Adds support for `options.onIgnore`. See the readme for details -- Adds support for `options.onResult`. See the readme for details - -### Breaking changes - -- The unixify option was renamed to `windows` -- caching and all related options and methods have been removed - -## 1.0.0 (2018-11-05) - -- adds `.onMatch` option -- improvements to `.scan` method -- numerous improvements and optimizations for matching and parsing -- better windows path handling - -## 0.1.0 - 2017-04-13 - -First release. - - -[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog diff --git a/node_modules/picomatch/LICENSE b/node_modules/picomatch/LICENSE deleted file mode 100644 index 3608dca..0000000 --- a/node_modules/picomatch/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2017-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/picomatch/README.md b/node_modules/picomatch/README.md deleted file mode 100644 index b0526e2..0000000 --- a/node_modules/picomatch/README.md +++ /dev/null @@ -1,708 +0,0 @@ -

# Picomatch

    -Blazing fast and accurate glob matcher written in JavaScript.
    -No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions. -

    - -
    -
    - -## Why picomatch? - -* **Lightweight** - No dependencies -* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. -* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) -* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) -* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. -* **Well tested** - Thousands of unit tests - -See the [library comparison](#library-comparisons) to other libraries. - -
    -
    - -## Table of Contents - -
    Click to expand - -- [Install](#install) -- [Usage](#usage) -- [API](#api) - * [picomatch](#picomatch) - * [.test](#test) - * [.matchBase](#matchbase) - * [.isMatch](#ismatch) - * [.parse](#parse) - * [.scan](#scan) - * [.compileRe](#compilere) - * [.makeRe](#makere) - * [.toRegex](#toregex) -- [Options](#options) - * [Picomatch options](#picomatch-options) - * [Scan Options](#scan-options) - * [Options Examples](#options-examples) -- [Globbing features](#globbing-features) - * [Basic globbing](#basic-globbing) - * [Advanced globbing](#advanced-globbing) - * [Braces](#braces) - * [Matching special characters as literals](#matching-special-characters-as-literals) -- [Library Comparisons](#library-comparisons) -- [Benchmarks](#benchmarks) -- [Philosophies](#philosophies) -- [About](#about) - * [Author](#author) - * [License](#license) - -_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ - -
    - -
    -
    - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -npm install --save picomatch -``` - -
    - -## Usage - -The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. - -```js -const pm = require('picomatch'); -const isMatch = pm('*.js'); - -console.log(isMatch('abcd')); //=> false -console.log(isMatch('a.js')); //=> true -console.log(isMatch('a.md')); //=> false -console.log(isMatch('a/b.js')); //=> false -``` - -
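Building on the usage example above, a small illustrative sketch of reusing one compiled matcher across many paths; the pattern and file list are hypothetical.

```js
const picomatch = require('picomatch');

// Compile once, reuse many times: the returned matcher is a plain
// predicate, so it slots directly into Array.prototype.filter.
const isSpec = picomatch('test/**/*.spec.js');

const files = ['test/a.spec.js', 'test/unit/b.spec.js', 'src/c.js'];
console.log(files.filter(isSpec));
//=> [ 'test/a.spec.js', 'test/unit/b.spec.js' ]
```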
    - -## API - -### [picomatch](lib/picomatch.js#L32) - -Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. - -**Params** - -* `globs` **{String|Array}**: One or more glob patterns. -* `options` **{Object=}** -* `returns` **{Function=}**: Returns a matcher function. - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch(glob[, options]); - -const isMatch = picomatch('*.!(*a)'); -console.log(isMatch('a.a')); //=> false -console.log(isMatch('a.b')); //=> true -``` - -### [.test](lib/picomatch.js#L117) - -Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. - -**Params** - -* `input` **{String}**: String to test. -* `regex` **{RegExp}** -* `returns` **{Object}**: Returns an object with matching info. - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.test(input, regex[, options]); - -console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); -// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } -``` - -### [.matchBase](lib/picomatch.js#L161) - -Match the basename of a filepath. - -**Params** - -* `input` **{String}**: String to test. -* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). -* `returns` **{Boolean}** - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.matchBase(input, glob[, options]); -console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true -``` - -### [.isMatch](lib/picomatch.js#L183) - -Returns true if **any** of the given glob `patterns` match the specified `string`. - -**Params** - -* **{String|Array}**: str The string to test. -* **{String|Array}**: patterns One or more glob patterns to use for matching. -* **{Object}**: See available [options](#options). -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.isMatch(string, patterns[, options]); - -console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true -console.log(picomatch.isMatch('a.a', 'b.*')); //=> false -``` - -### [.parse](lib/picomatch.js#L199) - -Parse a glob pattern to create the source string for a regular expression. - -**Params** - -* `pattern` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. - -**Example** - -```js -const picomatch = require('picomatch'); -const result = picomatch.parse(pattern[, options]); -``` - -### [.scan](lib/picomatch.js#L231) - -Scan a glob pattern to separate the pattern into segments. - -**Params** - -* `input` **{String}**: Glob pattern to scan. 
-* `options` **{Object}** -* `returns` **{Object}**: Returns an object with - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.scan(input[, options]); - -const result = picomatch.scan('!./foo/*.js'); -console.log(result); -{ prefix: '!./', - input: '!./foo/*.js', - start: 3, - base: 'foo', - glob: '*.js', - isBrace: false, - isBracket: false, - isGlob: true, - isExtglob: false, - isGlobstar: false, - negated: true } -``` - -### [.compileRe](lib/picomatch.js#L245) - -Compile a regular expression from the `state` object returned by the -[parse()](#parse) method. - -**Params** - -* `state` **{Object}** -* `options` **{Object}** -* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. -* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. -* `returns` **{RegExp}** - -### [.makeRe](lib/picomatch.js#L286) - -Create a regular expression from a parsed glob pattern. - -**Params** - -* `state` **{String}**: The object returned from the `.parse` method. -* `options` **{Object}** -* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. -* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. -* `returns` **{RegExp}**: Returns a regex created from the given pattern. - -**Example** - -```js -const picomatch = require('picomatch'); -const state = picomatch.parse('*.js'); -// picomatch.compileRe(state[, options]); - -console.log(picomatch.compileRe(state)); -//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ -``` - -### [.toRegex](lib/picomatch.js#L321) - -Create a regular expression from the given regex source string. - -**Params** - -* `source` **{String}**: Regular expression source string. -* `options` **{Object}** -* `returns` **{RegExp}** - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.toRegex(source[, options]); - -const { output } = picomatch.parse('*.js'); -console.log(picomatch.toRegex(output)); -//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ -``` - -
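A short illustrative sketch of how the API pieces documented above fit together: `scan()` splits a pattern into its literal base and glob part, while `parse()` plus `compileRe()` is the two-step form of `makeRe()`. The pattern is hypothetical.

```js
const picomatch = require('picomatch');

// scan() separates the literal base directory from the glob part,
// which is how globbers decide where to start walking the filesystem.
const { base, glob } = picomatch.scan('src/components/**/*.jsx');
console.log(base); //=> 'src/components'
console.log(glob); //=> '**/*.jsx'

// parse() followed by compileRe() produces the same regex makeRe() would.
const re = picomatch.compileRe(picomatch.parse('*.jsx'));
console.log(re.test('Button.jsx')); //=> true
```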
    - -## Options - -### Picomatch options - -The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | -| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | -| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | -| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | -| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | -| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | -| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | -| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | -| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | -| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | -| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | -| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | -| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | -| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | -| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | -| `matchBase` | `boolean` | `false` | Alias for `basename` | -| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | -| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | -| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | -| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | -| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. 
| -| `noext` | `boolean` | `false` | Alias for `noextglob` | -| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | -| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | -| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | -| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | -| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | -| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | -| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | -| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | -| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | -| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | -| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | -| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | -| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | -| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | -| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | - -picomatch has automatic detection for regex positive and negative lookbehinds. If the pattern contains a negative lookbehind, you must be using Node.js >= 8.10 or else picomatch will throw an error. - -### Scan Options - -In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | -| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | - -**Example** - -```js -const picomatch = require('picomatch'); -const result = picomatch.scan('!./foo/*.js', { tokens: true }); -console.log(result); -// { -// prefix: '!./', -// input: '!./foo/*.js', -// start: 3, -// base: 'foo', -// glob: '*.js', -// isBrace: false, -// isBracket: false, -// isGlob: true, -// isExtglob: false, -// isGlobstar: false, -// negated: true, -// maxDepth: 2, -// tokens: [ -// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, -// { value: 'foo', depth: 1, isGlob: false }, -// { value: '*.js', depth: 1, isGlob: true } -// ], -// slashes: [ 2, 6 ], -// parts: [ 'foo', '*.js' ] -// } -``` - -
- -### Options Examples - -#### options.expandRange - -**Type**: `function` - -**Default**: `undefined` - -Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. - -**Example** - -The following example shows how to create a glob that matches numeric folder names between `01` and `25`, with leading zeros. - -```js -const fill = require('fill-range'); -const pm = require('picomatch'); - -const regex = pm.makeRe('foo/{01..25}/bar', { - expandRange(a, b) { - return `(${fill(a, b, { toRegex: true })})`; - } -}); - -console.log(regex); -//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ - -console.log(regex.test('foo/00/bar')) // false -console.log(regex.test('foo/01/bar')) // true -console.log(regex.test('foo/10/bar')) // true -console.log(regex.test('foo/22/bar')) // true -console.log(regex.test('foo/25/bar')) // true -console.log(regex.test('foo/26/bar')) // false -``` - -#### options.format - -**Type**: `function` - -**Default**: `undefined` - -Custom function for formatting strings before they're matched. - -**Example** - -```js -// strip leading './' from strings -const format = str => str.replace(/^\.\//, ''); -const isMatch = picomatch('foo/*.js', { format }); -console.log(isMatch('./foo/bar.js')); //=> true -``` - -#### options.onMatch - -```js -const onMatch = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onMatch }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -#### options.onIgnore - -```js -const onIgnore = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -#### options.onResult - -```js -const onResult = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onResult, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -
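-#### options.basename
-
-The `basename` option (and its `matchBase` alias) has no dedicated example above, so here is a minimal sketch; the results in the comments follow from the option description in the table, so treat them as expected rather than recorded output:
-
-```js
-const picomatch = require('picomatch');
-
-// With `basename: true`, a pattern without slashes is tested against the
-// basename of the path (expected results are assumptions based on the
-// option description above).
-const isMatch = picomatch('*.js', { basename: true });
-
-console.log(isMatch('foo/bar.js'));  // expected: true  (matches "bar.js")
-console.log(isMatch('foo/bar.css')); // expected: false
-```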
    -
- -## Globbing features - -* [Basic globbing](#basic-globbing) (Wildcard matching) -* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching) - -### Basic globbing - -| **Character** | **Description** | -| --- | --- | -| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. | -| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` on Windows) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. | -| `?` | Matches any character excluding path separators one time. Does _not match_ path separators or leading dots. | -| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. | - -#### Matching behavior vs. Bash - -Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions: - -* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`. -* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` backtracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`. - -
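-To make the table above concrete, here is a minimal sketch; the results in the comments follow from the descriptions above, so treat them as expected rather than recorded output:
-
-```js
-const pm = require('picomatch');
-
-// `*` does not cross path separators, `**` does (expected results are
-// assumptions based on the table above).
-console.log(pm.isMatch('foo/bar.js', '*.js'));    // expected: false
-console.log(pm.isMatch('foo/bar.js', '**/*.js')); // expected: true
-
-// Dotfiles are only matched when `dot: true` is set.
-console.log(pm.isMatch('.profile', '*'));                // expected: false
-console.log(pm.isMatch('.profile', '*', { dot: true })); // expected: true
-```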
- -### Advanced globbing - -* [extglobs](#extglobs) -* [POSIX brackets](#posix-brackets) -* [Braces](#brace-expansion) - -#### Extglobs - -| **Pattern** | **Description** | -| --- | --- | -| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` | -| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` | -| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` | -| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` | -| `!(pattern)` | Match _anything but_ `pattern` | - -**Examples** - -```js -const pm = require('picomatch'); - -// *(pattern) matches ZERO or more of "pattern" -console.log(pm.isMatch('a', 'a*(z)')); // true -console.log(pm.isMatch('az', 'a*(z)')); // true -console.log(pm.isMatch('azzz', 'a*(z)')); // true - -// +(pattern) matches ONE or more of "pattern" -console.log(pm.isMatch('a', 'a+(z)')); // false -console.log(pm.isMatch('az', 'a+(z)')); // true -console.log(pm.isMatch('azzz', 'a+(z)')); // true - -// supports multiple extglobs -console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false - -// supports nested extglobs -console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true -``` - -#### POSIX brackets - -POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true. - -**Enable POSIX bracket support** - -```js -console.log(pm.makeRe('[[:word:]]+', { posix: true })); -//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/ -``` - -**Supported POSIX classes** - -The following named POSIX bracket expressions are supported: - -* `[:alnum:]` - Alphanumeric characters, equivalent to `[a-zA-Z0-9]`. -* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`. -* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`. -* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`. -* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`. -* `[:digit:]` - Numerical digits, equivalent to `[0-9]`. -* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`. -* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`. -* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`. -* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`. -* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`. -* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`. -* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`. -* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`. - -See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information. - -### Braces - -Picomatch does not do brace expansion; it only has very basic support for braces (brace _matching_). For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. - -### Matching special characters as literals - -If you wish to match any of the following special characters literally in a file path, they must be escaped with backslashes or quotes in your glob pattern: - -**Special Characters** - -Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms. 
- -To match any of the following characters as literals, they must be escaped in the glob pattern: `$^*+?()[]`. - -Examples: - -```js -console.log(pm.makeRe('foo/bar \\(1\\)')); -``` - -
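-The same escaped pattern can also be used for matching, not just for generating a regex; a rough sketch (expected results are inferred from the escaping rule above):
-
-```js
-const pm = require('picomatch');
-
-// Escaped parentheses are matched as literal characters in the file path
-// (expected results are assumptions based on the escaping rule above).
-console.log(pm.isMatch('foo/bar (1)', 'foo/bar \\(1\\)')); // expected: true
-console.log(pm.isMatch('foo/bar (2)', 'foo/bar \\(1\\)')); // expected: false
-```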
    -
- -## Library Comparisons - -The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets). - -| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` | -| --- | --- | --- | --- | --- | --- | --- | --- | -| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - | -| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - | -| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - | -| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - | -| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - | -| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ | -| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ | -| File system operations | - | - | - | - | - | - | - | - -
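-Since the table distinguishes brace _matching_ from brace _expansion_, here is a brief sketch of what picomatch's brace matching means in practice (the results in the comments follow from the feature table above, so treat them as expected rather than recorded output):
-
-```js
-const pm = require('picomatch');
-
-// Braces are *matched* (compiled into the regex), not *expanded* into a
-// list of patterns the way braces/micromatch do (expected results are
-// assumptions based on the feature table above).
-console.log(pm.isMatch('a.txt', '{a,b}.txt'));            // expected: true
-console.log(pm.isMatch('c.txt', '{a,b}.txt'));            // expected: false
-console.log(pm.isMatch('file-2.txt', 'file-{1..3}.txt')); // expected: true
-```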
    -
- -## Benchmarks - -Performance comparison of picomatch and minimatch. - -``` -# .makeRe star - picomatch x 1,993,050 ops/sec ±0.51% (91 runs sampled) - minimatch x 627,206 ops/sec ±1.96% (87 runs sampled) - -# .makeRe star; dot=true - picomatch x 1,436,640 ops/sec ±0.62% (91 runs sampled) - minimatch x 525,876 ops/sec ±0.60% (88 runs sampled) - -# .makeRe globstar - picomatch x 1,592,742 ops/sec ±0.42% (90 runs sampled) - minimatch x 962,043 ops/sec ±1.76% (91 runs sampled) - -# .makeRe globstars - picomatch x 1,615,199 ops/sec ±0.35% (94 runs sampled) - minimatch x 477,179 ops/sec ±1.33% (91 runs sampled) - -# .makeRe with leading star - picomatch x 1,220,856 ops/sec ±0.40% (92 runs sampled) - minimatch x 453,564 ops/sec ±1.43% (94 runs sampled) - -# .makeRe - basic braces - picomatch x 392,067 ops/sec ±0.70% (90 runs sampled) - minimatch x 99,532 ops/sec ±2.03% (87 runs sampled) -``` - -
    -
- -## Philosophies - -The goal of this library is to be blazing fast, without compromising on accuracy. - -**Accuracy** - -The number one goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly complicated when different features are combined, as when extglobs are mixed with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`. - -Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to use our best judgement and rely on feedback from users to make improvements. - -**Performance** - -Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer. - -
    -
    - -## About - -
-### Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. - -
    - -
-### Running Tests - -Running and reviewing unit tests is a great way to become familiar with a library and its API. You can install dependencies and run tests with the following command: - -```sh -npm install && npm test -``` - -
    - -
-### Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme); please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
    - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). diff --git a/node_modules/picomatch/index.js b/node_modules/picomatch/index.js deleted file mode 100644 index d2f2bc5..0000000 --- a/node_modules/picomatch/index.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict'; - -module.exports = require('./lib/picomatch'); diff --git a/node_modules/picomatch/lib/constants.js b/node_modules/picomatch/lib/constants.js deleted file mode 100644 index a62ef38..0000000 --- a/node_modules/picomatch/lib/constants.js +++ /dev/null @@ -1,179 +0,0 @@ -'use strict'; - -const path = require('path'); -const WIN_SLASH = '\\\\/'; -const WIN_NO_SLASH = `[^${WIN_SLASH}]`; - -/** - * Posix glob regex - */ - -const DOT_LITERAL = '\\.'; -const PLUS_LITERAL = '\\+'; -const QMARK_LITERAL = '\\?'; -const SLASH_LITERAL = '\\/'; -const ONE_CHAR = '(?=.)'; -const QMARK = '[^/]'; -const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; -const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; -const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; -const NO_DOT = `(?!${DOT_LITERAL})`; -const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; -const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; -const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; -const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; -const STAR = `${QMARK}*?`; - -const POSIX_CHARS = { - DOT_LITERAL, - PLUS_LITERAL, - QMARK_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - QMARK, - END_ANCHOR, - DOTS_SLASH, - NO_DOT, - NO_DOTS, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK_NO_DOT, - STAR, - START_ANCHOR -}; - -/** - * Windows glob regex - */ - -const WINDOWS_CHARS = { - ...POSIX_CHARS, - - SLASH_LITERAL: `[${WIN_SLASH}]`, - QMARK: WIN_NO_SLASH, - STAR: `${WIN_NO_SLASH}*?`, - DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, - NO_DOT: `(?!${DOT_LITERAL})`, - NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, - NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - QMARK_NO_DOT: `[^.${WIN_SLASH}]`, - START_ANCHOR: `(?:^|[${WIN_SLASH}])`, - END_ANCHOR: `(?:[${WIN_SLASH}]|$)` -}; - -/** - * POSIX Bracket Regex - */ - -const POSIX_REGEX_SOURCE = { - alnum: 'a-zA-Z0-9', - alpha: 'a-zA-Z', - ascii: '\\x00-\\x7F', - blank: ' \\t', - cntrl: '\\x00-\\x1F\\x7F', - digit: '0-9', - graph: '\\x21-\\x7E', - lower: 'a-z', - print: '\\x20-\\x7E ', - punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', - space: ' \\t\\r\\n\\v\\f', - upper: 'A-Z', - word: 'A-Za-z0-9_', - xdigit: 'A-Fa-f0-9' -}; - -module.exports = { - MAX_LENGTH: 1024 * 64, - POSIX_REGEX_SOURCE, - - // regular expressions - REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, - REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, - REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, - REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, - REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, - REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, - - // Replace globs with equivalent patterns to reduce parsing time. - REPLACEMENTS: { - '***': '*', - '**/**': '**', - '**/**/**': '**' - }, - - // Digits - CHAR_0: 48, /* 0 */ - CHAR_9: 57, /* 9 */ - - // Alphabet chars. 
- CHAR_UPPERCASE_A: 65, /* A */ - CHAR_LOWERCASE_A: 97, /* a */ - CHAR_UPPERCASE_Z: 90, /* Z */ - CHAR_LOWERCASE_Z: 122, /* z */ - - CHAR_LEFT_PARENTHESES: 40, /* ( */ - CHAR_RIGHT_PARENTHESES: 41, /* ) */ - - CHAR_ASTERISK: 42, /* * */ - - // Non-alphabetic chars. - CHAR_AMPERSAND: 38, /* & */ - CHAR_AT: 64, /* @ */ - CHAR_BACKWARD_SLASH: 92, /* \ */ - CHAR_CARRIAGE_RETURN: 13, /* \r */ - CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ - CHAR_COLON: 58, /* : */ - CHAR_COMMA: 44, /* , */ - CHAR_DOT: 46, /* . */ - CHAR_DOUBLE_QUOTE: 34, /* " */ - CHAR_EQUAL: 61, /* = */ - CHAR_EXCLAMATION_MARK: 33, /* ! */ - CHAR_FORM_FEED: 12, /* \f */ - CHAR_FORWARD_SLASH: 47, /* / */ - CHAR_GRAVE_ACCENT: 96, /* ` */ - CHAR_HASH: 35, /* # */ - CHAR_HYPHEN_MINUS: 45, /* - */ - CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ - CHAR_LEFT_CURLY_BRACE: 123, /* { */ - CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ - CHAR_LINE_FEED: 10, /* \n */ - CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ - CHAR_PERCENT: 37, /* % */ - CHAR_PLUS: 43, /* + */ - CHAR_QUESTION_MARK: 63, /* ? */ - CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ - CHAR_RIGHT_CURLY_BRACE: 125, /* } */ - CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ - CHAR_SEMICOLON: 59, /* ; */ - CHAR_SINGLE_QUOTE: 39, /* ' */ - CHAR_SPACE: 32, /* */ - CHAR_TAB: 9, /* \t */ - CHAR_UNDERSCORE: 95, /* _ */ - CHAR_VERTICAL_LINE: 124, /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ - - SEP: path.sep, - - /** - * Create EXTGLOB_CHARS - */ - - extglobChars(chars) { - return { - '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, - '?': { type: 'qmark', open: '(?:', close: ')?' }, - '+': { type: 'plus', open: '(?:', close: ')+' }, - '*': { type: 'star', open: '(?:', close: ')*' }, - '@': { type: 'at', open: '(?:', close: ')' } - }; - }, - - /** - * Create GLOB_CHARS - */ - - globChars(win32) { - return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; - } -}; diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js deleted file mode 100644 index 58269d0..0000000 --- a/node_modules/picomatch/lib/parse.js +++ /dev/null @@ -1,1091 +0,0 @@ -'use strict'; - -const constants = require('./constants'); -const utils = require('./utils'); - -/** - * Constants - */ - -const { - MAX_LENGTH, - POSIX_REGEX_SOURCE, - REGEX_NON_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_BACKREF, - REPLACEMENTS -} = constants; - -/** - * Helpers - */ - -const expandRange = (args, options) => { - if (typeof options.expandRange === 'function') { - return options.expandRange(...args, options); - } - - args.sort(); - const value = `[${args.join('-')}]`; - - try { - /* eslint-disable-next-line no-new */ - new RegExp(value); - } catch (ex) { - return args.map(v => utils.escapeRegex(v)).join('..'); - } - - return value; -}; - -/** - * Create the message for a syntax error - */ - -const syntaxError = (type, char) => { - return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; -}; - -/** - * Parse the given input string. - * @param {String} input - * @param {Object} options - * @return {Object} - */ - -const parse = (input, options) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } - - input = REPLACEMENTS[input] || input; - - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - - let len = input.length; - if (len > max) { - throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); - } - - const bos = { type: 'bos', value: '', output: opts.prepend || '' }; - const tokens = [bos]; - - const capture = opts.capture ? '' : '?:'; - const win32 = utils.isWindows(options); - - // create constants based on platform, for windows or posix - const PLATFORM_CHARS = constants.globChars(win32); - const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); - - const { - DOT_LITERAL, - PLUS_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - DOTS_SLASH, - NO_DOT, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK, - QMARK_NO_DOT, - STAR, - START_ANCHOR - } = PLATFORM_CHARS; - - const globstar = opts => { - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; - }; - - const nodot = opts.dot ? '' : NO_DOT; - const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; - let star = opts.bash === true ? globstar(opts) : STAR; - - if (opts.capture) { - star = `(${star})`; - } - - // minimatch options support - if (typeof opts.noext === 'boolean') { - opts.noextglob = opts.noext; - } - - const state = { - input, - index: -1, - start: 0, - dot: opts.dot === true, - consumed: '', - output: '', - prefix: '', - backtrack: false, - negated: false, - brackets: 0, - braces: 0, - parens: 0, - quotes: 0, - globstar: false, - tokens - }; - - input = utils.removePrefix(input, state); - len = input.length; - - const extglobs = []; - const braces = []; - const stack = []; - let prev = bos; - let value; - - /** - * Tokenizing helpers - */ - - const eos = () => state.index === len - 1; - const peek = state.peek = (n = 1) => input[state.index + n]; - const advance = state.advance = () => input[++state.index] || ''; - const remaining = () => input.slice(state.index + 1); - const consume = (value = '', num = 0) => { - state.consumed += value; - state.index += num; - }; - - const append = token => { - state.output += token.output != null ? token.output : token.value; - consume(token.value); - }; - - const negate = () => { - let count = 1; - - while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { - advance(); - state.start++; - count++; - } - - if (count % 2 === 0) { - return false; - } - - state.negated = true; - state.start++; - return true; - }; - - const increment = type => { - state[type]++; - stack.push(type); - }; - - const decrement = type => { - state[type]--; - stack.pop(); - }; - - /** - * Push tokens onto the tokens array. This helper speeds up - * tokenizing by 1) helping us avoid backtracking as much as possible, - * and 2) helping us avoid creating extra tokens when consecutive - * characters are plain text. This improves performance and simplifies - * lookbehinds. 
- */ - - const push = tok => { - if (prev.type === 'globstar') { - const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); - const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); - - if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { - state.output = state.output.slice(0, -prev.output.length); - prev.type = 'star'; - prev.value = '*'; - prev.output = star; - state.output += prev.output; - } - } - - if (extglobs.length && tok.type !== 'paren') { - extglobs[extglobs.length - 1].inner += tok.value; - } - - if (tok.value || tok.output) append(tok); - if (prev && prev.type === 'text' && tok.type === 'text') { - prev.value += tok.value; - prev.output = (prev.output || '') + tok.value; - return; - } - - tok.prev = prev; - tokens.push(tok); - prev = tok; - }; - - const extglobOpen = (type, value) => { - const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; - - token.prev = prev; - token.parens = state.parens; - token.output = state.output; - const output = (opts.capture ? '(' : '') + token.open; - - increment('parens'); - push({ type, value, output: state.output ? '' : ONE_CHAR }); - push({ type: 'paren', extglob: true, value: advance(), output }); - extglobs.push(token); - }; - - const extglobClose = token => { - let output = token.close + (opts.capture ? ')' : ''); - let rest; - - if (token.type === 'negate') { - let extglobStar = star; - - if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { - extglobStar = globstar(opts); - } - - if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { - output = token.close = `)$))${extglobStar}`; - } - - if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { - // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. - // In this case, we need to parse the string and use it in the output of the original pattern. - // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. - // - // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. - const expression = parse(rest, { ...options, fastpaths: false }).output; - - output = token.close = `)${expression})${extglobStar})`; - } - - if (token.prev.type === 'bos') { - state.negatedExtglob = true; - } - } - - push({ type: 'paren', extglob: true, value, output }); - decrement('parens'); - }; - - /** - * Fast paths - */ - - if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { - let backslashes = false; - - let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { - if (first === '\\') { - backslashes = true; - return m; - } - - if (first === '?') { - if (esc) { - return esc + first + (rest ? QMARK.repeat(rest.length) : ''); - } - if (index === 0) { - return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); - } - return QMARK.repeat(chars.length); - } - - if (first === '.') { - return DOT_LITERAL.repeat(chars.length); - } - - if (first === '*') { - if (esc) { - return esc + first + (rest ? star : ''); - } - return star; - } - return esc ? m : `\\${m}`; - }); - - if (backslashes === true) { - if (opts.unescape === true) { - output = output.replace(/\\/g, ''); - } else { - output = output.replace(/\\+/g, m => { - return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); - }); - } - } - - if (output === input && opts.contains === true) { - state.output = input; - return state; - } - - state.output = utils.wrapOutput(output, state, options); - return state; - } - - /** - * Tokenize input until we reach end-of-string - */ - - while (!eos()) { - value = advance(); - - if (value === '\u0000') { - continue; - } - - /** - * Escaped characters - */ - - if (value === '\\') { - const next = peek(); - - if (next === '/' && opts.bash !== true) { - continue; - } - - if (next === '.' || next === ';') { - continue; - } - - if (!next) { - value += '\\'; - push({ type: 'text', value }); - continue; - } - - // collapse slashes to reduce potential for exploits - const match = /^\\+/.exec(remaining()); - let slashes = 0; - - if (match && match[0].length > 2) { - slashes = match[0].length; - state.index += slashes; - if (slashes % 2 !== 0) { - value += '\\'; - } - } - - if (opts.unescape === true) { - value = advance(); - } else { - value += advance(); - } - - if (state.brackets === 0) { - push({ type: 'text', value }); - continue; - } - } - - /** - * If we're inside a regex character class, continue - * until we reach the closing bracket. - */ - - if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { - if (opts.posix !== false && value === ':') { - const inner = prev.value.slice(1); - if (inner.includes('[')) { - prev.posix = true; - - if (inner.includes(':')) { - const idx = prev.value.lastIndexOf('['); - const pre = prev.value.slice(0, idx); - const rest = prev.value.slice(idx + 2); - const posix = POSIX_REGEX_SOURCE[rest]; - if (posix) { - prev.value = pre + posix; - state.backtrack = true; - advance(); - - if (!bos.output && tokens.indexOf(prev) === 1) { - bos.output = ONE_CHAR; - } - continue; - } - } - } - } - - if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { - value = `\\${value}`; - } - - if (value === ']' && (prev.value === '[' || prev.value === '[^')) { - value = `\\${value}`; - } - - if (opts.posix === true && value === '!' && prev.value === '[') { - value = '^'; - } - - prev.value += value; - append({ value }); - continue; - } - - /** - * If we're inside a quoted string, continue - * until we reach the closing double quote. - */ - - if (state.quotes === 1 && value !== '"') { - value = utils.escapeRegex(value); - prev.value += value; - append({ value }); - continue; - } - - /** - * Double quotes - */ - - if (value === '"') { - state.quotes = state.quotes === 1 ? 0 : 1; - if (opts.keepQuotes === true) { - push({ type: 'text', value }); - } - continue; - } - - /** - * Parentheses - */ - - if (value === '(') { - increment('parens'); - push({ type: 'paren', value }); - continue; - } - - if (value === ')') { - if (state.parens === 0 && opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '(')); - } - - const extglob = extglobs[extglobs.length - 1]; - if (extglob && state.parens === extglob.parens + 1) { - extglobClose(extglobs.pop()); - continue; - } - - push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); - decrement('parens'); - continue; - } - - /** - * Square brackets - */ - - if (value === '[') { - if (opts.nobracket === true || !remaining().includes(']')) { - if (opts.nobracket !== true && opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('closing', ']')); - } - - value = `\\${value}`; - } else { - increment('brackets'); - } - - push({ type: 'bracket', value }); - continue; - } - - if (value === ']') { - if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { - push({ type: 'text', value, output: `\\${value}` }); - continue; - } - - if (state.brackets === 0) { - if (opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '[')); - } - - push({ type: 'text', value, output: `\\${value}` }); - continue; - } - - decrement('brackets'); - - const prevValue = prev.value.slice(1); - if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { - value = `/${value}`; - } - - prev.value += value; - append({ value }); - - // when literal brackets are explicitly disabled - // assume we should match with a regex character class - if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { - continue; - } - - const escaped = utils.escapeRegex(prev.value); - state.output = state.output.slice(0, -prev.value.length); - - // when literal brackets are explicitly enabled - // assume we should escape the brackets to match literal characters - if (opts.literalBrackets === true) { - state.output += escaped; - prev.value = escaped; - continue; - } - - // when the user specifies nothing, try to match both - prev.value = `(${capture}${escaped}|${prev.value})`; - state.output += prev.value; - continue; - } - - /** - * Braces - */ - - if (value === '{' && opts.nobrace !== true) { - increment('braces'); - - const open = { - type: 'brace', - value, - output: '(', - outputIndex: state.output.length, - tokensIndex: state.tokens.length - }; - - braces.push(open); - push(open); - continue; - } - - if (value === '}') { - const brace = braces[braces.length - 1]; - - if (opts.nobrace === true || !brace) { - push({ type: 'text', value, output: value }); - continue; - } - - let output = ')'; - - if (brace.dots === true) { - const arr = tokens.slice(); - const range = []; - - for (let i = arr.length - 1; i >= 0; i--) { - tokens.pop(); - if (arr[i].type === 'brace') { - break; - } - if (arr[i].type !== 'dots') { - range.unshift(arr[i].value); - } - } - - output = expandRange(range, opts); - state.backtrack = true; - } - - if (brace.comma !== true && brace.dots !== true) { - const out = state.output.slice(0, brace.outputIndex); - const toks = state.tokens.slice(brace.tokensIndex); - brace.value = brace.output = '\\{'; - value = output = '\\}'; - state.output = out; - for (const t of toks) { - state.output += (t.output || t.value); - } - } - - push({ type: 'brace', value, output }); - decrement('braces'); - braces.pop(); - continue; - } - - /** - * Pipes - */ - - if (value === '|') { - if (extglobs.length > 0) { - extglobs[extglobs.length - 1].conditions++; - } - push({ type: 'text', value }); - continue; - } - - /** - * Commas - */ - - if (value === ',') { - let output = value; - - const brace = braces[braces.length - 1]; - if (brace && stack[stack.length - 1] === 'braces') { - brace.comma = true; - output = '|'; - } - - push({ type: 'comma', value, output }); - continue; - } - - /** - * Slashes - */ - - if (value === '/') { - // if the beginning of the glob is "./", advance the start - // to the current 
index, and don't add the "./" characters - // to the state. This greatly simplifies lookbehinds when - // checking for BOS characters like "!" and "." (not "./") - if (prev.type === 'dot' && state.index === state.start + 1) { - state.start = state.index + 1; - state.consumed = ''; - state.output = ''; - tokens.pop(); - prev = bos; // reset "prev" to the first token - continue; - } - - push({ type: 'slash', value, output: SLASH_LITERAL }); - continue; - } - - /** - * Dots - */ - - if (value === '.') { - if (state.braces > 0 && prev.type === 'dot') { - if (prev.value === '.') prev.output = DOT_LITERAL; - const brace = braces[braces.length - 1]; - prev.type = 'dots'; - prev.output += value; - prev.value += value; - brace.dots = true; - continue; - } - - if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { - push({ type: 'text', value, output: DOT_LITERAL }); - continue; - } - - push({ type: 'dot', value, output: DOT_LITERAL }); - continue; - } - - /** - * Question marks - */ - - if (value === '?') { - const isGroup = prev && prev.value === '('; - if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('qmark', value); - continue; - } - - if (prev && prev.type === 'paren') { - const next = peek(); - let output = value; - - if (next === '<' && !utils.supportsLookbehinds()) { - throw new Error('Node.js v10 or higher is required for regex lookbehinds'); - } - - if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { - output = `\\${value}`; - } - - push({ type: 'text', value, output }); - continue; - } - - if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { - push({ type: 'qmark', value, output: QMARK_NO_DOT }); - continue; - } - - push({ type: 'qmark', value, output: QMARK }); - continue; - } - - /** - * Exclamation - */ - - if (value === '!') { - if (opts.noextglob !== true && peek() === '(') { - if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { - extglobOpen('negate', value); - continue; - } - } - - if (opts.nonegate !== true && state.index === 0) { - negate(); - continue; - } - } - - /** - * Plus - */ - - if (value === '+') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('plus', value); - continue; - } - - if ((prev && prev.value === '(') || opts.regex === false) { - push({ type: 'plus', value, output: PLUS_LITERAL }); - continue; - } - - if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { - push({ type: 'plus', value }); - continue; - } - - push({ type: 'plus', value: PLUS_LITERAL }); - continue; - } - - /** - * Plain text - */ - - if (value === '@') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - push({ type: 'at', extglob: true, value, output: '' }); - continue; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Plain text - */ - - if (value !== '*') { - if (value === '$' || value === '^') { - value = `\\${value}`; - } - - const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); - if (match) { - value += match[0]; - state.index += match[0].length; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Stars - */ - - if (prev && (prev.type === 'globstar' || prev.star === true)) { - prev.type = 'star'; - prev.star = true; - prev.value += value; - prev.output = star; - state.backtrack = true; - state.globstar = true; - consume(value); - continue; - } - - let rest = remaining(); - if (opts.noextglob !== true && /^\([^?]/.test(rest)) { - extglobOpen('star', value); - continue; - } - - if (prev.type === 'star') { - if (opts.noglobstar === true) { - consume(value); - continue; - } - - const prior = prev.prev; - const before = prior.prev; - const isStart = prior.type === 'slash' || prior.type === 'bos'; - const afterStar = before && (before.type === 'star' || before.type === 'globstar'); - - if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { - push({ type: 'star', value, output: '' }); - continue; - } - - const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); - const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); - if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { - push({ type: 'star', value, output: '' }); - continue; - } - - // strip consecutive `/**/` - while (rest.slice(0, 3) === '/**') { - const after = input[state.index + 4]; - if (after && after !== '/') { - break; - } - rest = rest.slice(3); - consume('/**', 3); - } - - if (prior.type === 'bos' && eos()) { - prev.type = 'globstar'; - prev.value += value; - prev.output = globstar(opts); - state.output = prev.output; - state.globstar = true; - consume(value); - continue; - } - - if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; - - prev.type = 'globstar'; - prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); - prev.value += value; - state.globstar = true; - state.output += prior.output + prev.output; - consume(value); - continue; - } - - if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { - const end = rest[1] !== void 0 ? 
'|$' : ''; - - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; - - prev.type = 'globstar'; - prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; - prev.value += value; - - state.output += prior.output + prev.output; - state.globstar = true; - - consume(value + advance()); - - push({ type: 'slash', value: '/', output: '' }); - continue; - } - - if (prior.type === 'bos' && rest[0] === '/') { - prev.type = 'globstar'; - prev.value += value; - prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; - state.output = prev.output; - state.globstar = true; - consume(value + advance()); - push({ type: 'slash', value: '/', output: '' }); - continue; - } - - // remove single star from output - state.output = state.output.slice(0, -prev.output.length); - - // reset previous token to globstar - prev.type = 'globstar'; - prev.output = globstar(opts); - prev.value += value; - - // reset output with globstar - state.output += prev.output; - state.globstar = true; - consume(value); - continue; - } - - const token = { type: 'star', value, output: star }; - - if (opts.bash === true) { - token.output = '.*?'; - if (prev.type === 'bos' || prev.type === 'slash') { - token.output = nodot + token.output; - } - push(token); - continue; - } - - if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { - token.output = value; - push(token); - continue; - } - - if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { - if (prev.type === 'dot') { - state.output += NO_DOT_SLASH; - prev.output += NO_DOT_SLASH; - - } else if (opts.dot === true) { - state.output += NO_DOTS_SLASH; - prev.output += NO_DOTS_SLASH; - - } else { - state.output += nodot; - prev.output += nodot; - } - - if (peek() !== '*') { - state.output += ONE_CHAR; - prev.output += ONE_CHAR; - } - } - - push(token); - } - - while (state.brackets > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); - state.output = utils.escapeLast(state.output, '['); - decrement('brackets'); - } - - while (state.parens > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); - state.output = utils.escapeLast(state.output, '('); - decrement('parens'); - } - - while (state.braces > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); - state.output = utils.escapeLast(state.output, '{'); - decrement('braces'); - } - - if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { - push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); - } - - // rebuild the output if we had to backtrack at any point - if (state.backtrack === true) { - state.output = ''; - - for (const token of state.tokens) { - state.output += token.output != null ? token.output : token.value; - - if (token.suffix) { - state.output += token.suffix; - } - } - } - - return state; -}; - -/** - * Fast paths for creating regular expressions for common glob patterns. - * This can significantly speed up processing and has very little downside - * impact when none of the fast paths match. - */ - -parse.fastpaths = (input, options) => { - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - const len = input.length; - if (len > max) { - throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); - } - - input = REPLACEMENTS[input] || input; - const win32 = utils.isWindows(options); - - // create constants based on platform, for windows or posix - const { - DOT_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - DOTS_SLASH, - NO_DOT, - NO_DOTS, - NO_DOTS_SLASH, - STAR, - START_ANCHOR - } = constants.globChars(win32); - - const nodot = opts.dot ? NO_DOTS : NO_DOT; - const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; - const capture = opts.capture ? '' : '?:'; - const state = { negated: false, prefix: '' }; - let star = opts.bash === true ? '.*?' : STAR; - - if (opts.capture) { - star = `(${star})`; - } - - const globstar = opts => { - if (opts.noglobstar === true) return star; - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; - }; - - const create = str => { - switch (str) { - case '*': - return `${nodot}${ONE_CHAR}${star}`; - - case '.*': - return `${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '*.*': - return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '*/*': - return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; - - case '**': - return nodot + globstar(opts); - - case '**/*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; - - case '**/*.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '**/.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; - - default: { - const match = /^(.*?)\.(\w+)$/.exec(str); - if (!match) return; - - const source = create(match[1]); - if (!source) return; - - return source + DOT_LITERAL + match[2]; - } - } - }; - - const output = utils.removePrefix(input, state); - let source = create(output); - - if (source && opts.strictSlashes !== true) { - source += `${SLASH_LITERAL}?`; - } - - return source; -}; - -module.exports = parse; diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js deleted file mode 100644 index 782d809..0000000 --- a/node_modules/picomatch/lib/picomatch.js +++ /dev/null @@ -1,342 +0,0 @@ -'use strict'; - -const path = require('path'); -const scan = require('./scan'); -const parse = require('./parse'); -const utils = require('./utils'); -const constants = require('./constants'); -const isObject = val => val && typeof val === 'object' && !Array.isArray(val); - -/** - * Creates a matcher function from one or more glob patterns. The - * returned function takes a string to match as its first argument, - * and returns true if the string is a match. The returned matcher - * function also takes a boolean as the second argument that, when true, - * returns an object with additional information. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch(glob[, options]); - * - * const isMatch = picomatch('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @name picomatch - * @param {String|Array} `globs` One or more glob patterns. - * @param {Object=} `options` - * @return {Function=} Returns a matcher function. 
- * @api public - */ - -const picomatch = (glob, options, returnState = false) => { - if (Array.isArray(glob)) { - const fns = glob.map(input => picomatch(input, options, returnState)); - const arrayMatcher = str => { - for (const isMatch of fns) { - const state = isMatch(str); - if (state) return state; - } - return false; - }; - return arrayMatcher; - } - - const isState = isObject(glob) && glob.tokens && glob.input; - - if (glob === '' || (typeof glob !== 'string' && !isState)) { - throw new TypeError('Expected pattern to be a non-empty string'); - } - - const opts = options || {}; - const posix = utils.isWindows(options); - const regex = isState - ? picomatch.compileRe(glob, options) - : picomatch.makeRe(glob, options, false, true); - - const state = regex.state; - delete regex.state; - - let isIgnored = () => false; - if (opts.ignore) { - const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; - isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); - } - - const matcher = (input, returnObject = false) => { - const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); - const result = { glob, state, regex, posix, input, output, match, isMatch }; - - if (typeof opts.onResult === 'function') { - opts.onResult(result); - } - - if (isMatch === false) { - result.isMatch = false; - return returnObject ? result : false; - } - - if (isIgnored(input)) { - if (typeof opts.onIgnore === 'function') { - opts.onIgnore(result); - } - result.isMatch = false; - return returnObject ? result : false; - } - - if (typeof opts.onMatch === 'function') { - opts.onMatch(result); - } - return returnObject ? result : true; - }; - - if (returnState) { - matcher.state = state; - } - - return matcher; -}; - -/** - * Test `input` with the given `regex`. This is used by the main - * `picomatch()` function to test the input string. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.test(input, regex[, options]); - * - * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); - * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } - * ``` - * @param {String} `input` String to test. - * @param {RegExp} `regex` - * @return {Object} Returns an object with matching info. - * @api public - */ - -picomatch.test = (input, regex, options, { glob, posix } = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected input to be a string'); - } - - if (input === '') { - return { isMatch: false, output: '' }; - } - - const opts = options || {}; - const format = opts.format || (posix ? utils.toPosixSlashes : null); - let match = input === glob; - let output = (match && format) ? format(input) : input; - - if (match === false) { - output = format ? format(input) : input; - match = output === glob; - } - - if (match === false || opts.capture === true) { - if (opts.matchBase === true || opts.basename === true) { - match = picomatch.matchBase(input, regex, options, posix); - } else { - match = regex.exec(output); - } - } - - return { isMatch: Boolean(match), match, output }; -}; - -/** - * Match the basename of a filepath. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.matchBase(input, glob[, options]); - * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true - * ``` - * @param {String} `input` String to test. - * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
- * @return {Boolean} - * @api public - */ - -picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { - const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); - return regex.test(path.basename(input)); -}; - -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.isMatch(string, patterns[, options]); - * - * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String|Array} str The string to test. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} [options] See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - -/** - * Parse a glob pattern to create the source string for a regular - * expression. - * - * ```js - * const picomatch = require('picomatch'); - * const result = picomatch.parse(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as a regex source string. - * @api public - */ - -picomatch.parse = (pattern, options) => { - if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); - return parse(pattern, { ...options, fastpaths: false }); -}; - -/** - * Scan a glob pattern to separate the pattern into segments. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.scan(input[, options]); - * - * const result = picomatch.scan('!./foo/*.js'); - * console.log(result); - * { prefix: '!./', - * input: '!./foo/*.js', - * start: 3, - * base: 'foo', - * glob: '*.js', - * isBrace: false, - * isBracket: false, - * isGlob: true, - * isExtglob: false, - * isGlobstar: false, - * negated: true } - * ``` - * @param {String} `input` Glob pattern to scan. - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -picomatch.scan = (input, options) => scan(input, options); - -/** - * Compile a regular expression from the `state` object returned by the - * [parse()](#parse) method. - * - * @param {Object} `state` - * @param {Object} `options` - * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. - * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. - * @return {RegExp} - * @api public - */ - -picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { - if (returnOutput === true) { - return state.output; - } - - const opts = options || {}; - const prepend = opts.contains ? '' : '^'; - const append = opts.contains ? '' : '$'; - - let source = `${prepend}(?:${state.output})${append}`; - if (state && state.negated === true) { - source = `^(?!${source}).*$`; - } - - const regex = picomatch.toRegex(source, options); - if (returnState === true) { - regex.state = state; - } - - return regex; -}; - -/** - * Create a regular expression from a parsed glob pattern. 
- * - * ```js - * const picomatch = require('picomatch'); - * const state = picomatch.parse('*.js'); - * // picomatch.compileRe(state[, options]); - * - * console.log(picomatch.compileRe(state)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `state` The object returned from the `.parse` method. - * @param {Object} `options` - * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. - * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ - -picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { - if (!input || typeof input !== 'string') { - throw new TypeError('Expected a non-empty string'); - } - - let parsed = { negated: false, fastpaths: true }; - - if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { - parsed.output = parse.fastpaths(input, options); - } - - if (!parsed.output) { - parsed = parse(input, options); - } - - return picomatch.compileRe(parsed, options, returnOutput, returnState); -}; - -/** - * Create a regular expression from the given regex source string. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.toRegex(source[, options]); - * - * const { output } = picomatch.parse('*.js'); - * console.log(picomatch.toRegex(output)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `source` Regular expression source string. - * @param {Object} `options` - * @return {RegExp} - * @api public - */ - -picomatch.toRegex = (source, options) => { - try { - const opts = options || {}; - return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); - } catch (err) { - if (options && options.debug === true) throw err; - return /$^/; - } -}; - -/** - * Picomatch constants. - * @return {Object} - */ - -picomatch.constants = constants; - -/** - * Expose "picomatch" - */ - -module.exports = picomatch; diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js deleted file mode 100644 index e59cd7a..0000000 --- a/node_modules/picomatch/lib/scan.js +++ /dev/null @@ -1,391 +0,0 @@ -'use strict'; - -const utils = require('./utils'); -const { - CHAR_ASTERISK, /* * */ - CHAR_AT, /* @ */ - CHAR_BACKWARD_SLASH, /* \ */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_EXCLAMATION_MARK, /* ! */ - CHAR_FORWARD_SLASH, /* / */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_PLUS, /* + */ - CHAR_QUESTION_MARK, /* ? */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_RIGHT_SQUARE_BRACKET /* ] */ -} = require('./constants'); - -const isPathSeparator = code => { - return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; -}; - -const depth = token => { - if (token.isPrefix !== true) { - token.depth = token.isGlobstar ? Infinity : 1; - } -}; - -/** - * Quickly scans a glob pattern and returns an object with a handful of - * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), - * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not - * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
- * - * ```js - * const pm = require('picomatch'); - * console.log(pm.scan('foo/bar/*.js')); - * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {Object} Returns an object with tokens and regex source string. - * @api public - */ - -const scan = (input, options) => { - const opts = options || {}; - - const length = input.length - 1; - const scanToEnd = opts.parts === true || opts.scanToEnd === true; - const slashes = []; - const tokens = []; - const parts = []; - - let str = input; - let index = -1; - let start = 0; - let lastIndex = 0; - let isBrace = false; - let isBracket = false; - let isGlob = false; - let isExtglob = false; - let isGlobstar = false; - let braceEscaped = false; - let backslashes = false; - let negated = false; - let negatedExtglob = false; - let finished = false; - let braces = 0; - let prev; - let code; - let token = { value: '', depth: 0, isGlob: false }; - - const eos = () => index >= length; - const peek = () => str.charCodeAt(index + 1); - const advance = () => { - prev = code; - return str.charCodeAt(++index); - }; - - while (index < length) { - code = advance(); - let next; - - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); - - if (code === CHAR_LEFT_CURLY_BRACE) { - braceEscaped = true; - } - continue; - } - - if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { - braces++; - - while (eos() !== true && (code = advance())) { - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; - } - - if (code === CHAR_LEFT_CURLY_BRACE) { - braces++; - continue; - } - - if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (braceEscaped !== true && code === CHAR_COMMA) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (code === CHAR_RIGHT_CURLY_BRACE) { - braces--; - - if (braces === 0) { - braceEscaped = false; - isBrace = token.isBrace = true; - finished = true; - break; - } - } - } - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (code === CHAR_FORWARD_SLASH) { - slashes.push(index); - tokens.push(token); - token = { value: '', depth: 0, isGlob: false }; - - if (finished === true) continue; - if (prev === CHAR_DOT && index === (start + 1)) { - start += 2; - continue; - } - - lastIndex = index + 1; - continue; - } - - if (opts.noext !== true) { - const isExtglobChar = code === CHAR_PLUS - || code === CHAR_AT - || code === CHAR_ASTERISK - || code === CHAR_QUESTION_MARK - || code === CHAR_EXCLAMATION_MARK; - - if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - isExtglob = token.isExtglob = true; - finished = true; - if (code === CHAR_EXCLAMATION_MARK && index === start) { - negatedExtglob = true; - } - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } - - if (code === CHAR_RIGHT_PARENTHESES) { - isGlob = token.isGlob = true; - finished = true; - break; - } - } - continue; - } - break; - } - } - - if (code === CHAR_ASTERISK) { - if (prev === CHAR_ASTERISK) isGlobstar = 
token.isGlobstar = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - break; - } - - if (code === CHAR_QUESTION_MARK) { - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - break; - } - - if (code === CHAR_LEFT_SQUARE_BRACKET) { - while (eos() !== true && (next = advance())) { - if (next === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; - } - - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - isBracket = token.isBracket = true; - isGlob = token.isGlob = true; - finished = true; - break; - } - } - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { - negated = token.negated = true; - start++; - continue; - } - - if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if (code === CHAR_LEFT_PARENTHESES) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } - - if (code === CHAR_RIGHT_PARENTHESES) { - finished = true; - break; - } - } - continue; - } - break; - } - - if (isGlob === true) { - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - } - - if (opts.noext === true) { - isExtglob = false; - isGlob = false; - } - - let base = str; - let prefix = ''; - let glob = ''; - - if (start > 0) { - prefix = str.slice(0, start); - str = str.slice(start); - lastIndex -= start; - } - - if (base && isGlob === true && lastIndex > 0) { - base = str.slice(0, lastIndex); - glob = str.slice(lastIndex); - } else if (isGlob === true) { - base = ''; - glob = str; - } else { - base = str; - } - - if (base && base !== '' && base !== '/' && base !== str) { - if (isPathSeparator(base.charCodeAt(base.length - 1))) { - base = base.slice(0, -1); - } - } - - if (opts.unescape === true) { - if (glob) glob = utils.removeBackslashes(glob); - - if (base && backslashes === true) { - base = utils.removeBackslashes(base); - } - } - - const state = { - prefix, - input, - start, - base, - glob, - isBrace, - isBracket, - isGlob, - isExtglob, - isGlobstar, - negated, - negatedExtglob - }; - - if (opts.tokens === true) { - state.maxDepth = 0; - if (!isPathSeparator(code)) { - tokens.push(token); - } - state.tokens = tokens; - } - - if (opts.parts === true || opts.tokens === true) { - let prevIndex; - - for (let idx = 0; idx < slashes.length; idx++) { - const n = prevIndex ? 
prevIndex + 1 : start; - const i = slashes[idx]; - const value = input.slice(n, i); - if (opts.tokens) { - if (idx === 0 && start !== 0) { - tokens[idx].isPrefix = true; - tokens[idx].value = prefix; - } else { - tokens[idx].value = value; - } - depth(tokens[idx]); - state.maxDepth += tokens[idx].depth; - } - if (idx !== 0 || value !== '') { - parts.push(value); - } - prevIndex = i; - } - - if (prevIndex && prevIndex + 1 < input.length) { - const value = input.slice(prevIndex + 1); - parts.push(value); - - if (opts.tokens) { - tokens[tokens.length - 1].value = value; - depth(tokens[tokens.length - 1]); - state.maxDepth += tokens[tokens.length - 1].depth; - } - } - - state.slashes = slashes; - state.parts = parts; - } - - return state; -}; - -module.exports = scan; diff --git a/node_modules/picomatch/lib/utils.js b/node_modules/picomatch/lib/utils.js deleted file mode 100644 index c3ca766..0000000 --- a/node_modules/picomatch/lib/utils.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -const path = require('path'); -const win32 = process.platform === 'win32'; -const { - REGEX_BACKSLASH, - REGEX_REMOVE_BACKSLASH, - REGEX_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_GLOBAL -} = require('./constants'); - -exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); -exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); -exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); -exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); -exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); - -exports.removeBackslashes = str => { - return str.replace(REGEX_REMOVE_BACKSLASH, match => { - return match === '\\' ? '' : match; - }); -}; - -exports.supportsLookbehinds = () => { - const segs = process.version.slice(1).split('.').map(Number); - if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { - return true; - } - return false; -}; - -exports.isWindows = options => { - if (options && typeof options.windows === 'boolean') { - return options.windows; - } - return win32 === true || path.sep === '\\'; -}; - -exports.escapeLast = (input, char, lastIdx) => { - const idx = input.lastIndexOf(char, lastIdx); - if (idx === -1) return input; - if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); - return `${input.slice(0, idx)}\\${input.slice(idx)}`; -}; - -exports.removePrefix = (input, state = {}) => { - let output = input; - if (output.startsWith('./')) { - output = output.slice(2); - state.prefix = './'; - } - return output; -}; - -exports.wrapOutput = (input, state = {}, options = {}) => { - const prepend = options.contains ? '' : '^'; - const append = options.contains ? 
'' : '$'; - - let output = `${prepend}(?:${input})${append}`; - if (state.negated === true) { - output = `(?:^(?!${output}).*$)`; - } - return output; -}; diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json deleted file mode 100644 index 3db22d4..0000000 --- a/node_modules/picomatch/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "picomatch", - "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", - "version": "2.3.1", - "homepage": "https://github.com/micromatch/picomatch", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "funding": "https://github.com/sponsors/jonschlinkert", - "repository": "micromatch/picomatch", - "bugs": { - "url": "https://github.com/micromatch/picomatch/issues" - }, - "license": "MIT", - "files": [ - "index.js", - "lib" - ], - "main": "index.js", - "engines": { - "node": ">=8.6" - }, - "scripts": { - "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", - "mocha": "mocha --reporter dot", - "test": "npm run lint && npm run mocha", - "test:ci": "npm run test:cover", - "test:cover": "nyc npm run mocha" - }, - "devDependencies": { - "eslint": "^6.8.0", - "fill-range": "^7.0.1", - "gulp-format-md": "^2.0.0", - "mocha": "^6.2.2", - "nyc": "^15.0.0", - "time-require": "github:jonschlinkert/time-require" - }, - "keywords": [ - "glob", - "match", - "picomatch" - ], - "nyc": { - "reporter": [ - "html", - "lcov", - "text-summary" - ] - }, - "verb": { - "toc": { - "render": true, - "method": "preWrite", - "maxdepth": 3 - }, - "layout": "empty", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "related": { - "list": [ - "braces", - "micromatch" - ] - }, - "reflinks": [ - "braces", - "expand-brackets", - "extglob", - "fill-range", - "micromatch", - "minimatch", - "nanomatch", - "picomatch" - ] - } -} diff --git a/node_modules/pify/index.js b/node_modules/pify/index.js deleted file mode 100644 index 7c720eb..0000000 --- a/node_modules/pify/index.js +++ /dev/null @@ -1,68 +0,0 @@ -'use strict'; - -var processFn = function (fn, P, opts) { - return function () { - var that = this; - var args = new Array(arguments.length); - - for (var i = 0; i < arguments.length; i++) { - args[i] = arguments[i]; - } - - return new P(function (resolve, reject) { - args.push(function (err, result) { - if (err) { - reject(err); - } else if (opts.multiArgs) { - var results = new Array(arguments.length - 1); - - for (var i = 1; i < arguments.length; i++) { - results[i - 1] = arguments[i]; - } - - resolve(results); - } else { - resolve(result); - } - }); - - fn.apply(that, args); - }); - }; -}; - -var pify = module.exports = function (obj, P, opts) { - if (typeof P !== 'function') { - opts = P; - P = Promise; - } - - opts = opts || {}; - opts.exclude = opts.exclude || [/.+Sync$/]; - - var filter = function (key) { - var match = function (pattern) { - return typeof pattern === 'string' ? key === pattern : pattern.test(key); - }; - - return opts.include ? opts.include.some(match) : !opts.exclude.some(match); - }; - - var ret = typeof obj === 'function' ? 
function () { - if (opts.excludeMain) { - return obj.apply(this, arguments); - } - - return processFn(obj, P, opts).apply(this, arguments); - } : {}; - - return Object.keys(obj).reduce(function (ret, key) { - var x = obj[key]; - - ret[key] = typeof x === 'function' && filter(key) ? processFn(x, P, opts) : x; - - return ret; - }, ret); -}; - -pify.all = pify; diff --git a/node_modules/pify/license b/node_modules/pify/license deleted file mode 100644 index 654d0bf..0000000 --- a/node_modules/pify/license +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/pify/package.json b/node_modules/pify/package.json deleted file mode 100644 index 311d198..0000000 --- a/node_modules/pify/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "pify", - "version": "2.3.0", - "description": "Promisify a callback-style function", - "license": "MIT", - "repository": "sindresorhus/pify", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "xo && ava && npm run optimization-test", - "optimization-test": "node --allow-natives-syntax optimization-test.js" - }, - "files": [ - "index.js" - ], - "keywords": [ - "promise", - "promises", - "promisify", - "denodify", - "denodeify", - "callback", - "cb", - "node", - "then", - "thenify", - "convert", - "transform", - "wrap", - "wrapper", - "bind", - "to", - "async", - "es2015" - ], - "devDependencies": { - "ava": "*", - "pinkie-promise": "^1.0.0", - "v8-natives": "0.0.2", - "xo": "*" - } -} diff --git a/node_modules/pify/readme.md b/node_modules/pify/readme.md deleted file mode 100644 index c79ca8b..0000000 --- a/node_modules/pify/readme.md +++ /dev/null @@ -1,119 +0,0 @@ -# pify [![Build Status](https://travis-ci.org/sindresorhus/pify.svg?branch=master)](https://travis-ci.org/sindresorhus/pify) - -> Promisify a callback-style function - - -## Install - -``` -$ npm install --save pify -``` - - -## Usage - -```js -const fs = require('fs'); -const pify = require('pify'); - -// promisify a single function - -pify(fs.readFile)('package.json', 'utf8').then(data => { - console.log(JSON.parse(data).name); - //=> 'pify' -}); - -// or promisify all methods in a module - -pify(fs).readFile('package.json', 'utf8').then(data => { - console.log(JSON.parse(data).name); - //=> 'pify' -}); -``` - - -## API - -### pify(input, [promiseModule], [options]) - -Returns 
a promise wrapped version of the supplied function or module. - -#### input - -Type: `function`, `object` - -Callback-style function or module whose methods you want to promisify. - -#### promiseModule - -Type: `function` - -Custom promise module to use instead of the native one. - -Check out [`pinkie-promise`](https://github.com/floatdrop/pinkie-promise) if you need a tiny promise polyfill. - -#### options - -##### multiArgs - -Type: `boolean` -Default: `false` - -By default, the promisified function will only return the second argument from the callback, which works fine for most APIs. This option can be useful for modules like `request` that return multiple arguments. Turning this on will make it return an array of all arguments from the callback, excluding the error argument, instead of just the second argument. - -```js -const request = require('request'); -const pify = require('pify'); - -pify(request, {multiArgs: true})('https://sindresorhus.com').then(result => { - const [httpResponse, body] = result; -}); -``` - -##### include - -Type: `array` of (`string`|`regex`) - -Methods in a module to promisify. Remaining methods will be left untouched. - -##### exclude - -Type: `array` of (`string`|`regex`) -Default: `[/.+Sync$/]` - -Methods in a module **not** to promisify. Methods with names ending with `'Sync'` are excluded by default. - -##### excludeMain - -Type: `boolean` -Default: `false` - -By default, if given module is a function itself, this function will be promisified. Turn this option on if you want to promisify only methods of the module. - -```js -const pify = require('pify'); - -function fn() { - return true; -} - -fn.method = (data, callback) => { - setImmediate(() => { - callback(data, null); - }); -}; - -// promisify methods but not fn() -const promiseFn = pify(fn, {excludeMain: true}); - -if (promiseFn()) { - promiseFn.method('hi').then(data => { - console.log(data); - }); -} -``` - - -## License - -MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/postcss-cli/LICENSE b/node_modules/postcss-cli/LICENSE deleted file mode 100644 index e4d2d4a..0000000 --- a/node_modules/postcss-cli/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -License (MIT) - -Copyright (c) 2016 Michael Ciniawsky - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
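The `include` and `exclude` options described in the pify readme above have no worked example there. A minimal sketch (an editor's illustration, not part of the original readme) that promisifies only selected `fs` methods:

```js
const fs = require('fs');
const pify = require('pify');

// Only the listed methods are promisified; every other property of `fs` is
// copied through unchanged. (Without `include`, the default
// `exclude: [/.+Sync$/]` would still skip the *Sync methods.)
const pfs = pify(fs, {include: ['readFile', 'writeFile']});

pfs.readFile('package.json', 'utf8').then(data => {
  console.log(JSON.parse(data).name);
});
```

Passing the options object as the second argument works because pify falls back to the native `Promise` when no custom promise module is given, as its `index.js` above shows.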
diff --git a/node_modules/postcss-cli/README.md b/node_modules/postcss-cli/README.md deleted file mode 100644 index 9dc9a2d..0000000 --- a/node_modules/postcss-cli/README.md +++ /dev/null @@ -1,149 +0,0 @@ -[![npm][npm]][npm-url] -[![node][node]][node-url] -[![tests][tests]][tests-url] -[![cover][cover]][cover-url] -[![chat][chat]][chat-url] - -

    PostCSS CLI


    Install

    - -```bash -npm i -D postcss postcss-cli -``` - -

    Usage

    - -``` -Usage: - postcss [input.css] [OPTIONS] [-o|--output output.css] [--watch|-w] - postcss ... [OPTIONS] --dir [--watch|-w] - postcss [OPTIONS] --dir [--watch|-w] - postcss [OPTIONS] --dir [--watch|-w] - postcss ... [OPTIONS] --replace - -Basic options: - -o, --output Output file [string] - -d, --dir Output directory [string] - -r, --replace Replace (overwrite) the input file [boolean] - -m, --map Create an external sourcemap - --no-map Disable the default inline sourcemaps - -w, --watch Watch files for changes and recompile as needed [boolean] - --verbose Be verbose [boolean] - --env A shortcut for setting NODE_ENV [string] - -Options for use without a config file: - -u, --use List of postcss plugins to use [array] - --parser Custom postcss parser [string] - --stringifier Custom postcss stringifier [string] - --syntax Custom postcss syntax [string] - -Options for use with --dir: - --ext Override the output file extension; for use with --dir [string] - --base Mirror the directory structure relative to this path in the output - directory, for use with --dir [string] - -Advanced options: - --include-dotfiles Enable glob to match files/dirs that begin with "." - [boolean] - --poll Use polling for file watching. Can optionally pass polling - interval; default 100 ms - --config Set a custom directory to look for a config file [string] - -Options: - --version Show version number [boolean] - -h, --help Show help [boolean] - -Examples: - postcss input.css -o output.css Basic usage - postcss src/**/*.css --base src --dir build Glob Pattern & output - cat input.css | postcss -u autoprefixer > output.css Piping input & output - -If no input files are passed, it reads from stdin. If neither -o, --dir, or ---replace is passed, it writes to stdout. - -If there are multiple input files, the --dir or --replace option must be passed. - -Input files may contain globs (e.g. src/**/*.css). If you pass an input -directory, it will process all files in the directory and any subdirectories, -respecting the glob pattern. -``` - -> ℹ️ More details on custom parsers, stringifiers and syntaxes, can be found [here](https://github.com/postcss/postcss#syntaxes). - -### [Config](https://github.com/michael-ciniawsky/postcss-load-config) - -If you need to pass options to your plugins, or have a long plugin chain, you'll want to use a configuration file. - -**postcss.config.js** - -```js -module.exports = { - parser: 'sugarss', - plugins: [ - require('postcss-import')({ ...options }), - require('postcss-url')({ url: 'copy', useHash: true }), - ], -} -``` - -Note that you **can not** set the `from` or `to` options for postcss in the config file. They are set automatically based on the CLI arguments. - -### Context - -For more advanced usage, it's recommended to use a function in `postcss.config.js`; this gives you access to the CLI context to dynamically apply options and plugins **per file** - -| Name | Type | Default | Description | -| :-------: | :--------: | :--------------------------------: | :------------------- | -| `env` | `{String}` | `'development'` | process.env.NODE_ENV | -| `file` | `{Object}` | `dirname, basename, extname` | File | -| `options` | `{Object}` | `map, parser, syntax, stringifier` | PostCSS Options | - -**postcss.config.js** - -```js -module.exports = (ctx) => ({ - map: ctx.options.map, - parser: ctx.file.extname === '.sss' ? 'sugarss' : false, - plugins: { - 'postcss-import': { root: ctx.file.dirname }, - cssnano: ctx.env === 'production' ? 
{} : false, - }, -}) -``` - -> ⚠️ If you want to set options via CLI, it's mandatory to reference `ctx.options` in `postcss.config.js` - -```bash -postcss input.sss -p sugarss -o output.css -m -``` - -**postcss.config.js** - -```js -module.exports = (ctx) => ({ - map: ctx.options.map, - parser: ctx.options.parser, - plugins: { - 'postcss-import': { root: ctx.file.dirname }, - cssnano: ctx.env === 'production' ? {} : false, - }, -}) -``` - -[npm]: https://img.shields.io/npm/v/postcss-cli.svg -[npm-url]: https://npmjs.com/package/postcss-cli -[node]: https://img.shields.io/node/v/postcss-cli.svg -[node-url]: https://nodejs.org/ -[tests]: https://img.shields.io/github/workflow/status/postcss/postcss-cli/Node.js%20CI/master -[tests-url]: https://github.com/postcss/postcss-cli/actions?query=branch%3Amaster -[cover]: https://img.shields.io/coveralls/postcss/postcss-cli/master.svg -[cover-url]: https://coveralls.io/github/postcss/postcss-cli -[chat]: https://img.shields.io/gitter/room/postcss/postcss.svg -[chat-url]: https://gitter.im/postcss/postcss diff --git a/node_modules/postcss-cli/index.js b/node_modules/postcss-cli/index.js deleted file mode 100755 index f77bf04..0000000 --- a/node_modules/postcss-cli/index.js +++ /dev/null @@ -1,354 +0,0 @@ -#!/usr/bin/env node - -import fs from 'fs-extra' -import path from 'path' - -import prettyHrtime from 'pretty-hrtime' -import stdin from 'get-stdin' -import read from 'read-cache' -import pc from 'picocolors' -import { globby } from 'globby' -import slash from 'slash' -import chokidar from 'chokidar' - -import postcss from 'postcss' -import postcssrc from 'postcss-load-config' -import postcssReporter from 'postcss-reporter/lib/formatter.js' - -import argv from './lib/args.js' -import createDependencyGraph from './lib/DependencyGraph.js' -import getMapfile from './lib/getMapfile.js' - -const reporter = postcssReporter() -const depGraph = createDependencyGraph() - -let input = argv._ -const { dir, output } = argv - -if (argv.map) argv.map = { inline: false } - -let cliConfig - -async function buildCliConfig() { - cliConfig = { - options: { - map: argv.map !== undefined ? argv.map : { inline: true }, - parser: argv.parser ? await import(argv.parser) : undefined, - syntax: argv.syntax ? await import(argv.syntax) : undefined, - stringifier: argv.stringifier - ? await import(argv.stringifier) - : undefined, - }, - plugins: argv.use - ? await Promise.all( - argv.use.map(async (plugin) => { - try { - return (await import(plugin)).default() - } catch (e) { - const msg = e.message || `Cannot find module '${plugin}'` - let prefix = msg.includes(plugin) ? 
'' : ` (${plugin})` - if (e.name && e.name !== 'Error') prefix += `: ${e.name}` - return error(`Plugin Error${prefix}: ${msg}'`) - } - }), - ) - : [], - } -} - -let configFile - -if (argv.env) process.env.NODE_ENV = argv.env -if (argv.config) argv.config = path.resolve(argv.config) - -let { isTTY } = process.stdin - -if (process.env.FORCE_IS_TTY === 'true') { - isTTY = true -} - -if (argv.watch && isTTY) { - process.stdin.on('end', () => process.exit(0)) - process.stdin.resume() -} - -/* istanbul ignore next */ -if (parseInt(postcss().version) < 8) { - error('Please install PostCSS 8 or above') -} - -buildCliConfig() - .then(() => { - if (argv.watch && !(argv.output || argv.replace || argv.dir)) { - error('Cannot write to stdout in watch mode') - // Need to explicitly exit here, since error() doesn't exit in watch mode - process.exit(1) - } - - if (input && input.length) { - return globby( - input.map((i) => slash(String(i))), - { dot: argv.includeDotfiles }, - ) - } - - if (argv.replace || argv.dir) { - error( - 'Input Error: Cannot use --dir or --replace when reading from stdin', - ) - } - - if (argv.watch) { - error('Input Error: Cannot run in watch mode when reading from stdin') - } - - return ['stdin'] - }) - .then((i) => { - if (!i || !i.length) { - error('Input Error: You must pass a valid list of files to parse') - } - - if (i.length > 1 && !argv.dir && !argv.replace) { - error( - 'Input Error: Must use --dir or --replace with multiple input files', - ) - } - - if (i[0] !== 'stdin') i = i.map((i) => path.resolve(i)) - - input = i - - return files(input) - }) - .then((results) => { - if (argv.watch) { - const printMessage = () => - printVerbose(pc.dim('\nWaiting for file changes...')) - const watcher = chokidar.watch(input.concat(dependencies(results)), { - usePolling: argv.poll, - interval: argv.poll && typeof argv.poll === 'number' ? 
argv.poll : 100, - awaitWriteFinish: { - stabilityThreshold: 50, - pollInterval: 10, - }, - }) - - if (configFile) watcher.add(configFile) - - watcher.on('ready', printMessage).on('change', (file) => { - let recompile = [] - - if (input.includes(file)) recompile.push(file) - - const dependants = depGraph - .dependantsOf(file) - .concat(getAncestorDirs(file).flatMap(depGraph.dependantsOf)) - - recompile = recompile.concat( - dependants.filter((file) => input.includes(file)), - ) - - if (!recompile.length) recompile = input - - return files([...new Set(recompile)]) - .then((results) => watcher.add(dependencies(results))) - .then(printMessage) - .catch(error) - }) - } - }) - .catch((err) => { - error(err) - - process.exit(1) - }) - -function rc(ctx, path) { - if (argv.use) return Promise.resolve(cliConfig) - - return postcssrc(ctx, path) - .then((rc) => { - if (rc.options.from || rc.options.to) { - error( - 'Config Error: Can not set from or to options in config file, use CLI arguments instead', - ) - } - configFile = rc.file - return rc - }) - .catch((err) => { - if (!err.message.includes('No PostCSS Config found')) throw err - }) -} - -function files(files) { - if (typeof files === 'string') files = [files] - - return Promise.all( - files.map((file) => { - if (file === 'stdin') { - return stdin().then((content) => { - if (!content) return error('Input Error: Did not receive any STDIN') - return css(content, 'stdin') - }) - } - - return read(file).then((content) => css(content, file)) - }), - ) -} - -function css(css, file) { - const ctx = { options: cliConfig.options } - - if (file !== 'stdin') { - ctx.file = { - dirname: path.dirname(file), - basename: path.basename(file), - extname: path.extname(file), - } - - if (!argv.config) argv.config = path.dirname(file) - } - - const relativePath = - file !== 'stdin' ? path.relative(path.resolve(), file) : file - - if (!argv.config) argv.config = process.cwd() - - const time = process.hrtime() - - printVerbose(pc.cyan(`Processing ${pc.bold(relativePath)}...`)) - - return rc(ctx, argv.config) - .then((config) => { - config = config || cliConfig - const options = { ...config.options } - - if (file === 'stdin' && output) file = output - - // TODO: Unit test this - options.from = file === 'stdin' ? path.join(process.cwd(), 'stdin') : file - - if (output || dir || argv.replace) { - const base = argv.base - ? file.replace(path.resolve(argv.base), '') - : path.basename(file) - options.to = output || (argv.replace ? 
file : path.join(dir, base)) - - if (argv.ext) { - options.to = options.to.replace(path.extname(options.to), argv.ext) - } - - options.to = path.resolve(options.to) - } - - if (!options.to && config.options.map && !config.options.map.inline) { - error( - 'Output Error: Cannot output external sourcemaps when writing to STDOUT', - ) - } - - return postcss(config.plugins) - .process(css, options) - .then((result) => { - const tasks = [] - - if (options.to) { - tasks.push(outputFile(options.to, result.css)) - - if (result.map) { - const mapfile = getMapfile(options) - tasks.push(outputFile(mapfile, result.map.toString())) - } - } else process.stdout.write(result.css, 'utf8') - - return Promise.all(tasks).then(() => { - const prettyTime = prettyHrtime(process.hrtime(time)) - printVerbose( - pc.green( - `Finished ${pc.bold(relativePath)} in ${pc.bold(prettyTime)}`, - ), - ) - - const messages = result.warnings() - if (messages.length) { - console.warn(reporter({ ...result, messages })) - } - - return result - }) - }) - }) - .catch((err) => { - throw err - }) - - async function outputFile(file, string) { - const fileExists = await fs.pathExists(file) - const currentValue = fileExists ? await fs.readFile(file, 'utf8') : null - if (currentValue === string) return - return fs.outputFile(file, string) - } -} - -function dependencies(results) { - if (!Array.isArray(results)) results = [results] - - const messages = [] - - results.forEach((result) => { - if (result.messages <= 0) return - - result.messages - .filter((msg) => - msg.type === 'dependency' || msg.type === 'dir-dependency' ? msg : '', - ) - .map(depGraph.add) - .forEach((dependency) => { - if (dependency.type === 'dir-dependency') { - messages.push( - dependency.glob - ? path.join(dependency.dir, dependency.glob) - : dependency.dir, - ) - } else { - messages.push(dependency.file) - } - }) - }) - - return messages -} - -function printVerbose(message) { - if (argv.verbose) console.warn(message) -} - -function error(err) { - // Seperate error from logging output - if (argv.verbose) console.error() - - if (typeof err === 'string') { - console.error(pc.red(err)) - } else if (err.name === 'CssSyntaxError') { - console.error(err.toString()) - } else { - console.error(err) - } - // Watch mode shouldn't exit on error - if (argv.watch) return - process.exit(1) -} - -// Input: '/imports/components/button.css' -// Output: ['/imports/components', '/imports', '/'] -function getAncestorDirs(fileOrDir) { - const { root } = path.parse(fileOrDir) - if (fileOrDir === root) { - return [] - } - const parentDir = path.dirname(fileOrDir) - return [parentDir, ...getAncestorDirs(parentDir)] -} diff --git a/node_modules/postcss-cli/lib/DependencyGraph.js b/node_modules/postcss-cli/lib/DependencyGraph.js deleted file mode 100644 index efb7ac3..0000000 --- a/node_modules/postcss-cli/lib/DependencyGraph.js +++ /dev/null @@ -1,30 +0,0 @@ -import path from 'path' -import { DepGraph } from 'dependency-graph' - -export default function createDependencyGraph() { - const graph = new DepGraph() - return { - add(message) { - message.parent = path.resolve(message.parent) - graph.addNode(message.parent) - - if (message.type === 'dir-dependency') { - message.dir = path.resolve(message.dir) - graph.addNode(message.dir) - graph.addDependency(message.parent, message.dir) - } else { - message.file = path.resolve(message.file) - graph.addNode(message.file) - graph.addDependency(message.parent, message.file) - } - - return message - }, - dependantsOf(node) { - node = path.resolve(node) 
- - if (graph.hasNode(node)) return graph.dependantsOf(node) - return [] - }, - } -} diff --git a/node_modules/postcss-cli/lib/args.js b/node_modules/postcss-cli/lib/args.js deleted file mode 100644 index 12d14fa..0000000 --- a/node_modules/postcss-cli/lib/args.js +++ /dev/null @@ -1,115 +0,0 @@ -import yargs from 'yargs' - -const { argv } = yargs(process.argv.slice(2)) - .usage( - `Usage: - $0 [input.css] [OPTIONS] [-o|--output output.css] [--watch|-w] - $0 ... [OPTIONS] --dir [--watch|-w] - $0 [OPTIONS] --dir [--watch|-w] - $0 [OPTIONS] --dir [--watch|-w] - $0 ... [OPTIONS] --replace`, - ) - .group( - ['o', 'd', 'r', 'map', 'no-map', 'watch', 'verbose', 'env'], - 'Basic options:', - ) - .option('o', { - alias: 'output', - desc: 'Output file', - type: 'string', - conflicts: ['dir', 'replace'], - }) - .option('d', { - alias: 'dir', - desc: 'Output directory', - type: 'string', - conflicts: ['output', 'replace'], - }) - .option('r', { - alias: 'replace', - desc: 'Replace (overwrite) the input file', - type: 'boolean', - conflicts: ['output', 'dir'], - }) - .alias('m', 'map') - .describe('map', 'Create an external sourcemap') - .describe('no-map', 'Disable the default inline sourcemaps') - .option('w', { - alias: 'watch', - desc: 'Watch files for changes and recompile as needed', - type: 'boolean', - conflicts: 'replace', - }) - .option('verbose', { - desc: 'Be verbose', - type: 'boolean', - }) - .option('env', { - desc: 'A shortcut for setting NODE_ENV', - type: 'string', - }) - .group( - ['u', 'parser', 'stringifier', 'syntax'], - 'Options for use without a config file:', - ) - .option('u', { - alias: 'use', - desc: 'List of postcss plugins to use', - type: 'array', - }) - .option('parser', { - desc: 'Custom postcss parser', - type: 'string', - }) - .option('stringifier', { - desc: 'Custom postcss stringifier', - type: 'string', - }) - .option('syntax', { - desc: 'Custom postcss syntax', - type: 'string', - }) - .group(['ext', 'base'], 'Options for use with --dir:') - .option('ext', { - desc: 'Override the output file extension; for use with --dir', - type: 'string', - implies: 'dir', - }) - .option('base', { - desc: 'Mirror the directory structure relative to this path in the output directory, for use with --dir', - type: 'string', - implies: 'dir', - }) - .group(['include-dotfiles', 'poll', 'config'], 'Advanced options:') - .option('include-dotfiles', { - desc: 'Enable glob to match files/dirs that begin with "."', - type: 'boolean', - }) - .option('poll', { - desc: 'Use polling for file watching. Can optionally pass polling interval; default 100 ms', - implies: 'watch', - }) - .option('config', { - desc: 'Set a custom directory to look for a config file', - type: 'string', - }) - .alias('h', 'help') - .example('$0 input.css -o output.css', 'Basic usage') - .example('$0 src/**/*.css --base src --dir build', 'Glob Pattern & output') - .example( - 'cat input.css | $0 -u autoprefixer > output.css', - 'Piping input & output', - ) - .epilog( - `If no input files are passed, it reads from stdin. If neither -o, --dir, or --replace is passed, it writes to stdout. - -If there are multiple input files, the --dir or --replace option must be passed. - -Input files may contain globs (e.g. src/**/*.css). If you pass an input directory, it will process all files in the directory and any subdirectories, respecting the glob pattern. 
- -For more details, please see https://github.com/postcss/postcss-cli`, - ) - -if (argv.ext && argv.ext.indexOf('.') !== 0) argv.ext = `.${argv.ext}` - -export default argv diff --git a/node_modules/postcss-cli/lib/getMapfile.js b/node_modules/postcss-cli/lib/getMapfile.js deleted file mode 100644 index e4d0c93..0000000 --- a/node_modules/postcss-cli/lib/getMapfile.js +++ /dev/null @@ -1,7 +0,0 @@ -import path from 'path' -export default function getMapfile(options) { - if (options.map && typeof options.map.annotation === 'string') { - return `${path.dirname(options.to)}/${options.map.annotation}` - } - return `${options.to}.map` -} diff --git a/node_modules/postcss-cli/package.json b/node_modules/postcss-cli/package.json deleted file mode 100644 index 7ae796a..0000000 --- a/node_modules/postcss-cli/package.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "name": "postcss-cli", - "version": "11.0.0", - "description": "CLI for PostCSS", - "type": "module", - "engines": { - "node": ">=18" - }, - "bin": { - "postcss": "./index.js" - }, - "scripts": { - "ci": "eslint . && c8 ava -v && npm run prettier -- --list-different", - "clean": "node test/helpers/clean.js", - "prettier": "prettier --single-quote --no-semi \"**/*.{js,md}\"", - "format": "npm run prettier -- --write && eslint . --fix", - "pretest": "npm run clean && npm run format", - "test": "c8 ava -v" - }, - "dependencies": { - "chokidar": "^3.3.0", - "dependency-graph": "^0.11.0", - "fs-extra": "^11.0.0", - "get-stdin": "^9.0.0", - "globby": "^14.0.0", - "picocolors": "^1.0.0", - "postcss-load-config": "^5.0.0", - "postcss-reporter": "^7.0.0", - "pretty-hrtime": "^1.0.3", - "read-cache": "^1.0.0", - "slash": "^5.0.0", - "yargs": "^17.0.0" - }, - "devDependencies": { - "ava": "^3.1.0", - "c8": "^8.0.0", - "coveralls": "^3.0.0", - "eslint": "^8.55.0", - "eslint-config-problems": "8.0.0", - "postcss": "^8.0.4", - "postcss-import": "^15.0.0", - "prettier": "~3.1.0", - "sugarss": "^4.0.0", - "uuid": "^9.0.0" - }, - "peerDependencies": { - "postcss": "^8.0.0" - }, - "files": [ - "index.js", - "lib", - "!**/*.test.js" - ], - "keywords": [ - "cli", - "postcss", - "postcss-runner" - ], - "contributors": [ - { - "name": "Michael Ciniawky", - "email": "michael.ciniawsky@gmail.com" - }, - { - "name": "Ryan Zimmermann", - "email": "opensrc@ryanzim.com" - } - ], - "repository": { - "type": "git", - "url": "https://github.com/postcss/postcss-cli.git" - }, - "bugs": { - "url": "https://github.com/postcss/postcss-cli/issues" - }, - "homepage": "https://github.com/postcss/postcss-cli#readme", - "license": "MIT" -} diff --git a/node_modules/postcss-load-config/LICENSE b/node_modules/postcss-load-config/LICENSE deleted file mode 100644 index 458e8a3..0000000 --- a/node_modules/postcss-load-config/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright Michael Ciniawsky - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/postcss-load-config/README.md b/node_modules/postcss-load-config/README.md deleted file mode 100644 index 84472ca..0000000 --- a/node_modules/postcss-load-config/README.md +++ /dev/null @@ -1,471 +0,0 @@ - - -

    Load Config


    Install

    - -```bash -npm i -D postcss-load-config -``` - -

    Usage

    - -```bash -npm i -S|-D postcss-plugin -``` - -Install all required PostCSS plugins and save them to your **package.json** `dependencies`/`devDependencies` - -Then create a PostCSS config file by choosing one of the following formats - -### `package.json` - -Create a **`postcss`** section in your project's **`package.json`** - -``` -Project (Root) - |– client - |– public - | - |- package.json -``` - -```json -{ - "postcss": { - "parser": "sugarss", - "map": false, - "plugins": { - "postcss-plugin": {} - } - } -} -``` - -### `.postcssrc` - -Create a **`.postcssrc`** file in JSON or YAML format - -> ℹ️ It's recommended to use an extension (e.g **`.postcssrc.json`** or **`.postcssrc.yml`**) instead of `.postcssrc` - -``` -Project (Root) - |– client - |– public - | - |- (.postcssrc|.postcssrc.json|.postcssrc.yml) - |- package.json -``` - -**`.postcssrc.json`** -```json -{ - "parser": "sugarss", - "map": false, - "plugins": { - "postcss-plugin": {} - } -} -``` - -**`.postcssrc.yml`** -```yaml -parser: sugarss -map: false -plugins: - postcss-plugin: {} -``` - -### `.postcssrc.js` or `postcss.config.js` - -You may need some logic within your config. -In this case create JS/TS file named: -- `.postcssrc.js` -- `.postcssrc.mjs` -- `.postcssrc.cjs` -- `.postcssrc.ts` -- `.postcssrc.mts` -- `.postcssrc.cts` -- `postcss.config.js` -- `postcss.config.mjs` -- `postcss.config.cjs` -- `postcss.config.ts` -- `postcss.config.mts` -- `postcss.config.cts` - -> [!NOTE] -> For TypeScript configs, you must have [tsx](https://www.npmjs.com/package/tsx) or [jiti](https://www.npmjs.com/package/jiti) installed as a peer dependency. - -``` -Project (Root) - |– client - |– public - |- (.postcssrc|postcss.config).(js|mjs|cjs|ts|mts|cts) - |- package.json -``` - -You can export the config as an `{Object}` - -**.postcssrc.js** -```js -module.exports = { - parser: 'sugarss', - map: false, - plugins: { - 'postcss-plugin': {} - } -} -``` - -Or export a `{Function}` that returns the config (more about the `ctx` param below) - -**.postcssrc.js** -```js -module.exports = (ctx) => ({ - parser: ctx.parser ? 'sugarss' : false, - map: ctx.env === 'development' ? ctx.map : false, - plugins: { - 'postcss-plugin': ctx.options.plugin - } -}) -``` - -Plugins can be loaded either using an `{Object}` or an `{Array}` - -#### `{Object}` - -**.postcssrc.js** -```js -module.exports = ({ env }) => ({ - ...options, - plugins: { - 'postcss-plugin': env === 'production' ? {} : false - } -}) -``` - -> ℹ️ When using an `{Object}`, the key can be a Node.js module name, a path to a JavaScript file that is relative to the directory of the PostCSS config file, or an absolute path to a JavaScript file. - -#### `{Array}` - -**.postcssrc.js** -```js -module.exports = ({ env }) => ({ - ...options, - plugins: [ - env === 'production' ? require('postcss-plugin')() : false - ] -}) -``` -> :warning: When using an `{Array}`, make sure to `require()` each plugin - -
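The note above mentions that an `{Object}` key may also be a path to a JavaScript file rather than a module name; the readme does not show that form, so here is a small sketch (the local plugin file is hypothetical):

```js
// .postcssrc.js
module.exports = {
  plugins: {
    // resolved from node_modules by package name
    'postcss-import': {},
    // resolved relative to the directory of this config file (hypothetical local plugin)
    './plugins/strip-comments.js': {}
  }
}
```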

    Options

    - -|Name|Type|Default|Description| -|:--:|:--:|:-----:|:----------| -|[**`to`**](#to)|`{String}`|`undefined`|Destination File Path| -|[**`map`**](#map)|`{String\|Object}`|`false`|Enable/Disable Source Maps| -|[**`from`**](#from)|`{String}`|`undefined`|Source File Path| -|[**`parser`**](#parser)|`{String\|Function}`|`false`|Custom PostCSS Parser| -|[**`syntax`**](#syntax)|`{String\|Function}`|`false`|Custom PostCSS Syntax| -|[**`stringifier`**](#stringifier)|`{String\|Function}`|`false`|Custom PostCSS Stringifier| - -### `parser` - -**.postcssrc.js** -```js -module.exports = { - parser: 'sugarss' -} -``` - -### `syntax` - -**.postcssrc.js** -```js -module.exports = { - syntax: 'postcss-scss' -} -``` - -### `stringifier` - -**.postcssrc.js** -```js -module.exports = { - stringifier: 'midas' -} -``` - -### [**`map`**](https://github.com/postcss/postcss/blob/master/docs/source-maps.md) - -**.postcssrc.js** -```js -module.exports = { - map: 'inline' -} -``` - -> :warning: In most cases `options.from` && `options.to` are set by the third-party which integrates this package (CLI, gulp, webpack). It's unlikely one needs to set/use `options.from` && `options.to` within a config file. Unless you're a third-party plugin author using this module and its Node API directly **dont't set `options.from` && `options.to` yourself** - -### `to` - -```js -module.exports = { - to: 'path/to/dest.css' -} -``` - -### `from` - -```js -module.exports = { - from: 'path/to/src.css' -} -``` - -
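Each option above is shown in isolation; as a sketch they compose into a single config object (values copied from the individual examples, not a recommendation):

```js
// .postcssrc.js
module.exports = {
  parser: 'sugarss',
  map: 'inline',
  plugins: {
    'postcss-plugin': {}
  }
}
```

Note that `from` and `to` are deliberately left out, per the warning above about letting the integrating tool set them.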

    Plugins

    - -### `{} || null` - -The plugin will be loaded with defaults - -```js -'postcss-plugin': {} || null -``` - -**.postcssrc.js** -```js -module.exports = { - plugins: { - 'postcss-plugin': {} || null - } -} -``` - -> :warning: `{}` must be an **empty** `{Object}` literal - -### `{Object}` - -The plugin will be loaded with given options - -```js -'postcss-plugin': { option: '', option: '' } -``` - -**.postcssrc.js** -```js -module.exports = { - plugins: { - 'postcss-plugin': { option: '', option: '' } - } -} -``` - -### `false` - -The plugin will not be loaded - -```js -'postcss-plugin': false -``` - -**.postcssrc.js** -```js -module.exports = { - plugins: { - 'postcss-plugin': false - } -} -``` - -### `Ordering` - -Plugin **execution order** is determined by declaration in the plugins section (**top-down**) - -```js -{ - plugins: { - 'postcss-plugin': {}, // [0] - 'postcss-plugin': {}, // [1] - 'postcss-plugin': {} // [2] - } -} -``` - -
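One caveat on the ordering snippet above: a JavaScript object literal cannot actually contain the same `'postcss-plugin'` key three times, so in practice the top-down rule applies to distinct plugin names (or to the `{Array}` form shown earlier). A sketch with distinct names taken from the Examples section below:

```js
// .postcssrc.js — plugins run in declaration order, top-down
module.exports = {
  plugins: {
    'postcss-import': {}, // [0] runs first
    'postcss-nested': {}, // [1]
    cssnano: {}           // [2] runs last
  }
}
```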

    Context

    - -When using a `{Function}` (`postcss.config.js` or `.postcssrc.js`), it's possible to pass context to `postcss-load-config`, which will be evaluated while loading your config. By default `ctx.env (process.env.NODE_ENV)` and `ctx.cwd (process.cwd())` are available on the `ctx` `{Object}` - -> ℹ️ Most third-party integrations add additional properties to the `ctx` (e.g `postcss-loader`). Check the specific module's README for more information about what is available on the respective `ctx` - -
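As a quick illustration of the two default context properties (a sketch, not from the original readme):

```js
// postcss.config.js
module.exports = (ctx) => ({
  // ctx.env mirrors process.env.NODE_ENV; ctx.cwd mirrors process.cwd()
  map: ctx.env === 'development' ? 'inline' : false,
  plugins: {
    // `root` is a documented postcss-import option; ctx.cwd is just one possible value for it
    'postcss-import': { root: ctx.cwd }
  }
})
```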

    Examples

    - -**postcss.config.js** - -```js -module.exports = (ctx) => ({ - parser: ctx.parser ? 'sugarss' : false, - map: ctx.env === 'development' ? ctx.map : false, - plugins: { - 'postcss-import': {}, - 'postcss-nested': {}, - cssnano: ctx.env === 'production' ? {} : false - } -}) -``` - -
    - -```json -"scripts": { - "build": "NODE_ENV=production node postcss", - "start": "NODE_ENV=development node postcss" -} -``` - -```js -const { readFileSync } = require('fs') - -const postcss = require('postcss') -const postcssrc = require('postcss-load-config') - -const css = readFileSync('index.css', 'utf8') - -const ctx = { parser: true, map: 'inline' } - -postcssrc(ctx).then(({ plugins, options }) => { - postcss(plugins) - .process(css, options) - .then((result) => console.log(result.css)) -}) -``` - -
    - -```json -"scripts": { - "build": "NODE_ENV=production gulp", - "start": "NODE_ENV=development gulp" -} -``` - -```js -const { task, src, dest, series, watch } = require('gulp') - -const postcss = require('gulp-postcssrc') - -const css = () => { - src('src/*.css') - .pipe(postcss()) - .pipe(dest('dest')) -}) - -task('watch', () => { - watch(['src/*.css', 'postcss.config.js'], css) -}) - -task('default', series(css, 'watch')) -``` - -
    - -```json -"scripts": { - "build": "NODE_ENV=production webpack", - "start": "NODE_ENV=development webpack-dev-server" -} -``` - -**webpack.config.js** -```js -module.exports = (env) => ({ - module: { - rules: [ - { - test: /\.css$/, - use: [ - 'style-loader', - 'css-loader', - 'postcss-loader' - ] - } - ] - } -}) -``` - -

    Maintainers

- Michael Ciniawsky
- Mateusz Derks

Contributors

- Ryan Dunckel
- Patrick Gilday
- Dalton Santos
- François Wouts
    = 18" - }, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "dependencies": { - "lilconfig": "^3.1.1", - "yaml": "^2.4.2" - }, - "peerDependencies": { - "jiti": ">=1.21.0", - "postcss": ">=8.0.9", - "tsx": "^4.8.1" - }, - "peerDependenciesMeta": { - "jiti": { - "optional": true - }, - "postcss": { - "optional": true - }, - "tsx": { - "optional": true - } - }, - "keywords": [ - "postcss", - "postcssrc", - "postcss.config.js" - ], - "author": "Michael Ciniawky ", - "contributors": [ - "Ryan Dunckel", - "Mateusz Derks", - "Dalton Santos", - "Patrick Gilday", - "François Wouts" - ], - "repository": "postcss/postcss-load-config", - "license": "MIT" -} diff --git a/node_modules/postcss-load-config/src/index.d.ts b/node_modules/postcss-load-config/src/index.d.ts deleted file mode 100644 index 30b7d61..0000000 --- a/node_modules/postcss-load-config/src/index.d.ts +++ /dev/null @@ -1,65 +0,0 @@ -// based on @types/postcss-load-config@2.0.1 -// Type definitions for postcss-load-config 2.1 -import Processor from 'postcss/lib/processor' -import { Plugin, ProcessOptions, Transformer } from 'postcss' -import { Options as ConfigOptions } from 'lilconfig' - -declare function postcssrc( - ctx?: postcssrc.ConfigContext, - path?: string, - options?: ConfigOptions -): Promise - -declare namespace postcssrc { - // In the ConfigContext, these three options can be instances of the - // appropriate class, or strings. If they are strings, postcss-load-config will - // require() them and pass the instances along. - export interface ProcessOptionsPreload { - parser?: string | ProcessOptions['parser'] - stringifier?: string | ProcessOptions['stringifier'] - syntax?: string | ProcessOptions['syntax'] - } - - // The remaining ProcessOptions, sans the three above. - export type RemainingProcessOptions = Pick< - ProcessOptions, - Exclude - > - - // Additional context options that postcss-load-config understands. - export interface Context { - cwd?: string - env?: string - } - - // The full shape of the ConfigContext. - export type ConfigContext = Context & - ProcessOptionsPreload & - RemainingProcessOptions - - // Result of postcssrc is a Promise containing the filename plus the options - // and plugins that are ready to pass on to postcss. - export type ResultPlugin = Plugin | Transformer | Processor - - export interface Result { - file: string - options: ProcessOptions - plugins: ResultPlugin[] - } - - export type ConfigPlugin = Transformer | Plugin | Processor - - export interface Config { - parser?: string | ProcessOptions['parser'] | false - stringifier?: string | ProcessOptions['stringifier'] | false - syntax?: string | ProcessOptions['syntax'] | false - map?: string | false - from?: string - to?: string - plugins?: Array | Record - } - - export type ConfigFn = (ctx: ConfigContext) => Config | Promise -} - -export = postcssrc diff --git a/node_modules/postcss-load-config/src/index.js b/node_modules/postcss-load-config/src/index.js deleted file mode 100644 index 796343c..0000000 --- a/node_modules/postcss-load-config/src/index.js +++ /dev/null @@ -1,164 +0,0 @@ -// @ts-check -const { resolve } = require('node:path') - -const config = require('lilconfig') -const yaml = require('yaml') - -const loadOptions = require('./options.js') -const loadPlugins = require('./plugins.js') -const req = require('./req.js') - -const interopRequireDefault = obj => - obj && obj.__esModule ? 
obj : { default: obj } - -/** - * Process the result from cosmiconfig - * - * @param {Object} ctx Config Context - * @param {Object} result Cosmiconfig result - * - * @return {Promise} PostCSS Config - */ -async function processResult(ctx, result) { - let file = result.filepath || '' - let projectConfig = interopRequireDefault(result.config).default || {} - - if (typeof projectConfig === 'function') { - projectConfig = projectConfig(ctx) - } else { - projectConfig = Object.assign({}, projectConfig, ctx) - } - - if (!projectConfig.plugins) { - projectConfig.plugins = [] - } - - let res = { - file, - options: await loadOptions(projectConfig, file), - plugins: await loadPlugins(projectConfig, file) - } - delete projectConfig.plugins - return res -} - -/** - * Builds the Config Context - * - * @param {Object} ctx Config Context - * - * @return {Object} Config Context - */ -function createContext(ctx) { - /** - * @type {Object} - * - * @prop {String} cwd=process.cwd() Config search start location - * @prop {String} env=process.env.NODE_ENV Config Enviroment, will be set to `development` by `postcss-load-config` if `process.env.NODE_ENV` is `undefined` - */ - ctx = Object.assign( - { - cwd: process.cwd(), - env: process.env.NODE_ENV - }, - ctx - ) - - if (!ctx.env) { - process.env.NODE_ENV = 'development' - } - - return ctx -} - -async function loader(filepath) { - return req(filepath) -} - -/** @return {import('lilconfig').Options} */ -const withLoaders = (options = {}) => { - let moduleName = 'postcss' - - return { - ...options, - loaders: { - ...options.loaders, - '.cjs': loader, - '.cts': loader, - '.js': loader, - '.mjs': loader, - '.mts': loader, - '.ts': loader, - '.yaml': (_, content) => yaml.parse(content), - '.yml': (_, content) => yaml.parse(content) - }, - searchPlaces: [ - ...(options.searchPlaces || []), - 'package.json', - `.${moduleName}rc`, - `.${moduleName}rc.json`, - `.${moduleName}rc.yaml`, - `.${moduleName}rc.yml`, - `.${moduleName}rc.ts`, - `.${moduleName}rc.cts`, - `.${moduleName}rc.mts`, - `.${moduleName}rc.js`, - `.${moduleName}rc.cjs`, - `.${moduleName}rc.mjs`, - `${moduleName}.config.ts`, - `${moduleName}.config.cts`, - `${moduleName}.config.mts`, - `${moduleName}.config.js`, - `${moduleName}.config.cjs`, - `${moduleName}.config.mjs` - ] - } -} - -/** - * Load Config - * - * @method rc - * - * @param {Object} ctx Config Context - * @param {String} path Config Path - * @param {Object} options Config Options - * - * @return {Promise} config PostCSS Config - */ -function rc(ctx, path, options) { - /** - * @type {Object} The full Config Context - */ - ctx = createContext(ctx) - - /** - * @type {String} `process.cwd()` - */ - path = path ? 
resolve(path) : process.cwd() - - return config - .lilconfig('postcss', withLoaders(options)) - .search(path) - .then(result => { - if (!result) { - throw new Error(`No PostCSS Config found in: ${path}`) - } - return processResult(ctx, result) - }) -} - -/** - * Autoload Config for PostCSS - * - * @author Michael Ciniawsky @michael-ciniawsky - * @license MIT - * - * @module postcss-load-config - * @version 2.1.0 - * - * @requires comsiconfig - * @requires ./options - * @requires ./plugins - */ -module.exports = rc diff --git a/node_modules/postcss-load-config/src/options.js b/node_modules/postcss-load-config/src/options.js deleted file mode 100644 index 14b03ce..0000000 --- a/node_modules/postcss-load-config/src/options.js +++ /dev/null @@ -1,48 +0,0 @@ -// @ts-check -const req = require('./req.js') - -/** - * Load Options - * - * @private - * @method options - * - * @param {Object} config PostCSS Config - * - * @return {Promise} options PostCSS Options - */ -async function options(config, file) { - if (config.parser && typeof config.parser === 'string') { - try { - config.parser = await req(config.parser, file) - } catch (err) { - throw new Error( - `Loading PostCSS Parser failed: ${err.message}\n\n(@${file})` - ) - } - } - - if (config.syntax && typeof config.syntax === 'string') { - try { - config.syntax = await req(config.syntax, file) - } catch (err) { - throw new Error( - `Loading PostCSS Syntax failed: ${err.message}\n\n(@${file})` - ) - } - } - - if (config.stringifier && typeof config.stringifier === 'string') { - try { - config.stringifier = await req(config.stringifier, file) - } catch (err) { - throw new Error( - `Loading PostCSS Stringifier failed: ${err.message}\n\n(@${file})` - ) - } - } - - return config -} - -module.exports = options diff --git a/node_modules/postcss-load-config/src/plugins.js b/node_modules/postcss-load-config/src/plugins.js deleted file mode 100644 index 2ea4506..0000000 --- a/node_modules/postcss-load-config/src/plugins.js +++ /dev/null @@ -1,90 +0,0 @@ -// @ts-check -const req = require('./req.js') - -/** - * Plugin Loader - * - * @private - * @method load - * - * @param {String} plugin PostCSS Plugin Name - * @param {Object} options PostCSS Plugin Options - * - * @return {Promise} PostCSS Plugin - */ -async function load(plugin, options, file) { - try { - if ( - options === null || - options === undefined || - Object.keys(options).length === 0 - ) { - return await req(plugin, file) - } else { - return (await req(plugin, file))(options) - /* c8 ignore next */ - } - } catch (err) { - throw new Error( - `Loading PostCSS Plugin failed: ${err.message}\n\n(@${file})` - ) - } -} - -/** - * Load Plugins - * - * @private - * @method plugins - * - * @param {Object} config PostCSS Config Plugins - * - * @return {Promise} plugins PostCSS Plugins - */ -async function plugins(config, file) { - let list = [] - - if (Array.isArray(config.plugins)) { - list = config.plugins.filter(Boolean) - } else { - list = Object.entries(config.plugins) - .filter(([, options]) => { - return options !== false - }) - .map(([plugin, options]) => { - return load(plugin, options, file) - }) - list = await Promise.all(list) - } - - if (list.length && list.length > 0) { - list.forEach((plugin, i) => { - if (plugin.default) { - plugin = plugin.default - } - - if (plugin.postcss === true) { - plugin = plugin() - } else if (plugin.postcss) { - plugin = plugin.postcss - } - - if ( - // eslint-disable-next-line - !( - (typeof plugin === 'object' && Array.isArray(plugin.plugins)) || - (typeof 
plugin === 'object' && plugin.postcssPlugin) || - typeof plugin === 'function' - ) - ) { - throw new TypeError( - `Invalid PostCSS Plugin found at: plugins[${i}]\n\n(@${file})` - ) - } - }) - } - - return list -} - -module.exports = plugins diff --git a/node_modules/postcss-load-config/src/req.js b/node_modules/postcss-load-config/src/req.js deleted file mode 100644 index e7a56ef..0000000 --- a/node_modules/postcss-load-config/src/req.js +++ /dev/null @@ -1,59 +0,0 @@ -// @ts-check -const { createRequire } = require('node:module') -const { pathToFileURL } = require('node:url') - -const TS_EXT_RE = /\.[mc]?ts$/ - -let tsx - -let jiti - -let importError - -/** - * @param {string} name - * @param {string} rootFile - * @returns {Promise} - */ -async function req(name, rootFile = __filename) { - let url = createRequire(rootFile).resolve(name) - - try { - return (await import(`${pathToFileURL(url)}?t=${Date.now()}`)).default - } catch (err) { - if (!TS_EXT_RE.test(url)) { - /* c8 ignore start */ - throw err - } - } - - if (tsx === undefined) { - tsx = await import('tsx/cjs/api').catch(error => { - importError = error - }) - } - - if (tsx) { - let loaded = tsx.require(name, rootFile) - return loaded && '__esModule' in loaded ? loaded.default : loaded - } - - if (jiti === undefined) { - jiti = await import('jiti').then( - m => m.default, - error => { - importError = importError ?? error - } - ) - } - - if (jiti) { - return jiti(rootFile, { interopDefault: true })(name) - } - - throw new Error( - `'tsx' or 'jiti' is required for the TypeScript configuration files. Make sure it is installed\nError: ${importError.message}` - ) -} - -module.exports = req diff --git a/node_modules/postcss-reporter/LICENSE b/node_modules/postcss-reporter/LICENSE deleted file mode 100644 index 6d347c0..0000000 --- a/node_modules/postcss-reporter/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 David Clark - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/node_modules/postcss-reporter/README.md b/node_modules/postcss-reporter/README.md deleted file mode 100644 index 5222656..0000000 --- a/node_modules/postcss-reporter/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# postcss-reporter - -A PostCSS plugin to `console.log()` the messages (warnings, etc.) registered by other PostCSS plugins. - ---- - -**SEEKING A NEW MAINTAINER!** Interested in contributing to the ecosystem of PostCSS and Stylelint? Please open an issue if you'd like to take over maintenance of this package. 
- ---- - -## Docs -Read full docs **[here](https://github.com/postcss/postcss-reporter#readme)**. diff --git a/node_modules/postcss-reporter/index.js b/node_modules/postcss-reporter/index.js deleted file mode 100644 index 01116aa..0000000 --- a/node_modules/postcss-reporter/index.js +++ /dev/null @@ -1,4 +0,0 @@ -var reporter = require('./lib/reporter'); - -module.exports = reporter; -module.exports.postcss = true; diff --git a/node_modules/postcss-reporter/lib/formatter.js b/node_modules/postcss-reporter/lib/formatter.js deleted file mode 100644 index 300efad..0000000 --- a/node_modules/postcss-reporter/lib/formatter.js +++ /dev/null @@ -1,93 +0,0 @@ -var pico = require('picocolors'); -var path = require('path'); -var firstBy = require('thenby'); -var util = require('./util'); - -function createSortFunction(positionless, sortByPosition) { - var positionValue = 0 - - if (positionless === 'any') { positionValue = 1; } - if (positionless === 'first') { positionValue = 2; } - if (positionless === 'last') { positionValue = 0; } - - var sortFunction = firstBy((m) => { - if (!m.line) return 1; - return positionValue; - }) - - if (sortByPosition) { - sortFunction = sortFunction.thenBy('line').thenBy('column'); - } - - return sortFunction; -} - -module.exports = function (opts) { - var options = opts || {}; - var sortByPosition = - typeof options.sortByPosition !== 'undefined' - ? options.sortByPosition - : true; - var positionless = options.positionless || 'first'; - - var sortFunction = createSortFunction(positionless, sortByPosition); - - return function (input) { - var messages = input.messages.filter(function (message) { - return typeof message.text === 'string'; - }); - var source = input.source; - - if (!messages.length) return ''; - - var orderedMessages = messages.sort(sortFunction); - - var output = '\n'; - - if (source) { - output += pico.bold(pico.underline(logFrom(source))) + '\n'; - } - - orderedMessages.forEach(function (w) { - output += messageToString(w) + '\n'; - }); - - return output; - - function messageToString(message) { - var location = util.getLocation(message); - var str = ''; - - if (location.line) { - str += pico.bold(location.line); - } - - if (location.column) { - str += pico.bold(':' + location.column); - } - - if (location.line || location.column) { - str += '\t'; - } - - if (!options.noIcon) { - if (message.type === 'warning') { - str += pico.yellow(util.warningSymbol + ' '); - } else if (message.type === 'error') { - str += pico.red(util.errorSymbol + ' '); - } - } - - str += message.text; - if (!options.noPlugin) { - str += pico.yellow(' [' + message.plugin + ']'); - } - return str; - } - - function logFrom(fromValue) { - if (fromValue.charAt(0) === '<') return fromValue; - return path.relative(process.cwd(), fromValue).split(path.sep).join('/'); - } - }; -}; diff --git a/node_modules/postcss-reporter/lib/reporter.js b/node_modules/postcss-reporter/lib/reporter.js deleted file mode 100644 index ae709c7..0000000 --- a/node_modules/postcss-reporter/lib/reporter.js +++ /dev/null @@ -1,111 +0,0 @@ -var defaultFormatter = require('./formatter'); -var pico = require('picocolors'); -var util = require('./util'); - -module.exports = function (opts = {}) { - var formatter = - opts.formatter || - defaultFormatter({ - noIcon: opts.noIcon, - noPlugin: opts.noPlugin, - }); - - var pluginFilter; - if (!opts.plugins) { - // Every plugin - pluginFilter = function () { - return true; - }; - } else if ( - opts.plugins.every(function (plugin) { - return plugin[0] === '!'; - }) - ) { 
- // Deny list - pluginFilter = function (message) { - return opts.plugins.indexOf('!' + message.plugin) === -1; - }; - } else { - // Allow list - pluginFilter = function (message) { - return opts.plugins.indexOf(message.plugin) !== -1; - }; - } - - var messageFilter = opts.filter || ((message) => message.type === 'warning' || message.type === 'error'); - - return { - postcssPlugin: 'postcss-reporter', - OnceExit(css, { result }) { - var messagesToLog = result.messages - .filter(pluginFilter) - .filter(messageFilter); - - var resultSource = !result.root.source - ? '' - : result.root.source.input.file || result.root.source.input.id; - - let errorCount = 0; - let warningCount = 0; - - var sourceGroupedMessages = messagesToLog.reduce((grouped, message) => { - const key = util.getLocation(message).file || resultSource; - - if (!grouped.hasOwnProperty(key)) { - grouped[key] = []; - } - - if (message.type === 'error') { - errorCount++; - } else if (message.type === 'warning') { - warningCount++; - } - - grouped[key].push(message); - - return grouped; - }, {}); - - var report = ''; - for (const source in sourceGroupedMessages) { - if (sourceGroupedMessages.hasOwnProperty(source)) { - report += formatter({ - messages: sourceGroupedMessages[source], - source: source, - }); - } - } - - if (opts.clearReportedMessages) { - result.messages = result.messages.filter(message => !messagesToLog.includes(message)); - } - - if (opts.clearAllMessages) { - var messagesToClear = result.messages.filter(pluginFilter); - result.messages = result.messages.filter(message => !messagesToClear.includes(message)); - } - - if (!report) return; - - const summaryColor = errorCount > 0 ? 'red' : 'yellow'; - const summarySymbol = errorCount > 0 ? util.errorSymbol : util.warningSymbol; - const summary = `${summarySymbol} ${messagesToLog.length} ${util.plur('problem', messagesToLog.length)} (${errorCount} ${util.plur('error')}, ${warningCount} ${util.plur('warning')})` - - report += `\n ${pico[summaryColor](pico.bold(summary))}\n`; - - console.log(report); - - if (shouldThrowError()) { - throw new Error( - pico.red( - pico.bold('\n** postcss-reporter: warnings or errors were found **') - ) - ); - } - - function shouldThrowError() { - return opts.throwError || errorCount > 0; - } - }, - }; -}; diff --git a/node_modules/postcss-reporter/lib/util.js b/node_modules/postcss-reporter/lib/util.js deleted file mode 100644 index 5433491..0000000 --- a/node_modules/postcss-reporter/lib/util.js +++ /dev/null @@ -1,31 +0,0 @@ -var supportsLargeCharset = - process.platform !== 'win32' || - process.env.CI || - process.env.TERM === 'xterm-256color'; - -exports.getLocation = function (message) { - var messageNode = message.node; - - var location = { - line: message.line, - column: message.column, - }; - - var messageInput = messageNode && messageNode.source && messageNode.source.input; - - if (!messageInput) return location; - - var originLocation = - messageInput.origin && messageInput.origin(message.line, message.column); - if (originLocation) return originLocation; - - location.file = messageInput.file || messageInput.id; - return location; -}; - -exports.plur = function plur(word, count) { - return (count === 1 ? word : `${word}s`); -} - -exports.warningSymbol = supportsLargeCharset ? '⚠' : '!!'; -exports.errorSymbol = supportsLargeCharset ? 
'✖' : 'xx'; diff --git a/node_modules/postcss-reporter/package.json b/node_modules/postcss-reporter/package.json deleted file mode 100644 index 1d4c48f..0000000 --- a/node_modules/postcss-reporter/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "postcss-reporter", - "version": "7.1.0", - "description": "Log PostCSS messages in the console", - "main": "index.js", - "files": [ - "index.js", - "lib" - ], - "engines": { - "node": ">=10" - }, - "repository": "postcss/postcss-reporter", - "author": { - "name": "David Clark", - "email": "david.dave.clark@gmail.com", - "url": "https://davidtheclark.com" - }, - "license": "MIT", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "peerDependencies": { - "postcss": "^8.1.0" - }, - "dependencies": { - "picocolors": "^1.0.0", - "thenby": "^1.3.4" - } -} diff --git a/node_modules/postcss/LICENSE b/node_modules/postcss/LICENSE deleted file mode 100644 index da057b4..0000000 --- a/node_modules/postcss/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright 2013 Andrey Sitnik - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/postcss/README.md b/node_modules/postcss/README.md deleted file mode 100644 index 939a802..0000000 --- a/node_modules/postcss/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# PostCSS - -Philosopher’s stone, logo of PostCSS - -PostCSS is a tool for transforming styles with JS plugins. -These plugins can lint your CSS, support variables and mixins, -transpile future CSS syntax, inline images, and more. - -PostCSS is used by industry leaders including Wikipedia, Twitter, Alibaba, -and JetBrains. The [Autoprefixer] and [Stylelint] PostCSS plugins is one of the most popular CSS tools. - ---- - -  Made at Evil Martians, product consulting for developer tools. - ---- - -[Abstract Syntax Tree]: https://en.wikipedia.org/wiki/Abstract_syntax_tree -[Evil Martians]: https://evilmartians.com/?utm_source=postcss -[Autoprefixer]: https://github.com/postcss/autoprefixer -[Stylelint]: https://stylelint.io/ -[plugins]: https://github.com/postcss/postcss#plugins - - -## Docs -Read full docs **[here](https://postcss.org/)**. 
diff --git a/node_modules/postcss/lib/at-rule.d.ts b/node_modules/postcss/lib/at-rule.d.ts deleted file mode 100644 index b2a0e0f..0000000 --- a/node_modules/postcss/lib/at-rule.d.ts +++ /dev/null @@ -1,140 +0,0 @@ -import Container, { - ContainerProps, - ContainerWithChildren -} from './container.js' - -declare namespace AtRule { - export interface AtRuleRaws extends Record { - /** - * The space symbols after the last child of the node to the end of the node. - */ - after?: string - - /** - * The space between the at-rule name and its parameters. - */ - afterName?: string - - /** - * The space symbols before the node. It also stores `*` - * and `_` symbols before the declaration (IE hack). - */ - before?: string - - /** - * The symbols between the last parameter and `{` for rules. - */ - between?: string - - /** - * The rule’s selector with comments. - */ - params?: { - raw: string - value: string - } - - /** - * Contains `true` if the last child has an (optional) semicolon. - */ - semicolon?: boolean - } - - export interface AtRuleProps extends ContainerProps { - /** Name of the at-rule. */ - name: string - /** Parameters following the name of the at-rule. */ - params?: number | string - /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ - raws?: AtRuleRaws - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { AtRule_ as default } -} - -/** - * Represents an at-rule. - * - * ```js - * Once (root, { AtRule }) { - * let media = new AtRule({ name: 'media', params: 'print' }) - * media.append(…) - * root.append(media) - * } - * ``` - * - * If it’s followed in the CSS by a `{}` block, this node will have - * a nodes property representing its children. - * - * ```js - * const root = postcss.parse('@charset "UTF-8"; @media print {}') - * - * const charset = root.first - * charset.type //=> 'atrule' - * charset.nodes //=> undefined - * - * const media = root.last - * media.nodes //=> [] - * ``` - */ -declare class AtRule_ extends Container { - /** - * An array containing the layer’s children. - * - * ```js - * const root = postcss.parse('@layer example { a { color: black } }') - * const layer = root.first - * layer.nodes.length //=> 1 - * layer.nodes[0].selector //=> 'a' - * ``` - * - * Can be `undefinded` if the at-rule has no body. - * - * ```js - * const root = postcss.parse('@layer a, b, c;') - * const layer = root.first - * layer.nodes //=> undefined - * ``` - */ - nodes: Container['nodes'] - parent: ContainerWithChildren | undefined - - raws: AtRule.AtRuleRaws - type: 'atrule' - constructor(defaults?: AtRule.AtRuleProps) - assign(overrides: AtRule.AtRuleProps | object): this - - clone(overrides?: Partial): this - - cloneAfter(overrides?: Partial): this - - cloneBefore(overrides?: Partial): this - /** - * The at-rule’s name immediately follows the `@`. - * - * ```js - * const root = postcss.parse('@media print {}') - * const media = root.first - * media.name //=> 'media' - * ``` - */ - get name(): string - set name(value: string) - /** - * The at-rule’s parameters, the values that follow the at-rule’s name - * but precede any `{}` block. 
- * - * ```js - * const root = postcss.parse('@media print, screen {}') - * const media = root.first - * media.params //=> 'print, screen' - * ``` - */ - get params(): string - set params(value: string) -} - -declare class AtRule extends AtRule_ {} - -export = AtRule diff --git a/node_modules/postcss/lib/at-rule.js b/node_modules/postcss/lib/at-rule.js deleted file mode 100644 index 9486447..0000000 --- a/node_modules/postcss/lib/at-rule.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict' - -let Container = require('./container') - -class AtRule extends Container { - constructor(defaults) { - super(defaults) - this.type = 'atrule' - } - - append(...children) { - if (!this.proxyOf.nodes) this.nodes = [] - return super.append(...children) - } - - prepend(...children) { - if (!this.proxyOf.nodes) this.nodes = [] - return super.prepend(...children) - } -} - -module.exports = AtRule -AtRule.default = AtRule - -Container.registerAtRule(AtRule) diff --git a/node_modules/postcss/lib/comment.d.ts b/node_modules/postcss/lib/comment.d.ts deleted file mode 100644 index 2b1a156..0000000 --- a/node_modules/postcss/lib/comment.d.ts +++ /dev/null @@ -1,68 +0,0 @@ -import Container from './container.js' -import Node, { NodeProps } from './node.js' - -declare namespace Comment { - export interface CommentRaws extends Record { - /** - * The space symbols before the node. - */ - before?: string - - /** - * The space symbols between `/*` and the comment’s text. - */ - left?: string - - /** - * The space symbols between the comment’s text. - */ - right?: string - } - - export interface CommentProps extends NodeProps { - /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ - raws?: CommentRaws - /** Content of the comment. */ - text: string - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Comment_ as default } -} - -/** - * It represents a class that handles - * [CSS comments](https://developer.mozilla.org/en-US/docs/Web/CSS/Comments) - * - * ```js - * Once (root, { Comment }) { - * const note = new Comment({ text: 'Note: …' }) - * root.append(note) - * } - * ``` - * - * Remember that CSS comments inside selectors, at-rule parameters, - * or declaration values will be stored in the `raws` properties - * explained above. - */ -declare class Comment_ extends Node { - parent: Container | undefined - raws: Comment.CommentRaws - type: 'comment' - constructor(defaults?: Comment.CommentProps) - - assign(overrides: Comment.CommentProps | object): this - - clone(overrides?: Partial): this - cloneAfter(overrides?: Partial): this - cloneBefore(overrides?: Partial): this - /** - * The comment's text. 
- */ - get text(): string - set text(value: string) -} - -declare class Comment extends Comment_ {} - -export = Comment diff --git a/node_modules/postcss/lib/comment.js b/node_modules/postcss/lib/comment.js deleted file mode 100644 index c566506..0000000 --- a/node_modules/postcss/lib/comment.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -let Node = require('./node') - -class Comment extends Node { - constructor(defaults) { - super(defaults) - this.type = 'comment' - } -} - -module.exports = Comment -Comment.default = Comment diff --git a/node_modules/postcss/lib/container.d.ts b/node_modules/postcss/lib/container.d.ts deleted file mode 100644 index 692bd69..0000000 --- a/node_modules/postcss/lib/container.d.ts +++ /dev/null @@ -1,483 +0,0 @@ -import AtRule from './at-rule.js' -import Comment from './comment.js' -import Declaration from './declaration.js' -import Node, { ChildNode, ChildProps, NodeProps } from './node.js' -import Rule from './rule.js' - -declare namespace Container { - export class ContainerWithChildren< - Child extends Node = ChildNode - > extends Container_ { - nodes: Child[] - } - - export interface ValueOptions { - /** - * String that’s used to narrow down values and speed up the regexp search. - */ - fast?: string - - /** - * An array of property names. - */ - props?: readonly string[] - } - - export interface ContainerProps extends NodeProps { - nodes?: readonly (ChildProps | Node)[] - } - - /** - * All types that can be passed into container methods to create or add a new - * child node. - */ - export type NewChild = - | ChildProps - | Node - | readonly ChildProps[] - | readonly Node[] - | readonly string[] - | string - | undefined - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Container_ as default } -} - -/** - * The `Root`, `AtRule`, and `Rule` container nodes - * inherit some common methods to help work with their children. - * - * Note that all containers can store any content. If you write a rule inside - * a rule, PostCSS will parse it. - */ -declare abstract class Container_ extends Node { - /** - * An array containing the container’s children. - * - * ```js - * const root = postcss.parse('a { color: black }') - * root.nodes.length //=> 1 - * root.nodes[0].selector //=> 'a' - * root.nodes[0].nodes[0].prop //=> 'color' - * ``` - */ - nodes: Child[] | undefined - - /** - * An internal method that converts a {@link NewChild} into a list of actual - * child nodes that can then be added to this container. - * - * This ensures that the nodes' parent is set to this container, that they use - * the correct prototype chain, and that they're marked as dirty. - * - * @param mnodes The new node or nodes to add. - * @param sample A node from whose raws the new node's `before` raw should be - * taken. - * @param type This should be set to `'prepend'` if the new nodes will be - * inserted at the beginning of the container. - * @hidden - */ - protected normalize( - nodes: Container.NewChild, - sample: Node | undefined, - type?: 'prepend' | false - ): Child[] - - /** - * Inserts new nodes to the end of the container. 
- * - * ```js - * const decl1 = new Declaration({ prop: 'color', value: 'black' }) - * const decl2 = new Declaration({ prop: 'background-color', value: 'white' }) - * rule.append(decl1, decl2) - * - * root.append({ name: 'charset', params: '"UTF-8"' }) // at-rule - * root.append({ selector: 'a' }) // rule - * rule.append({ prop: 'color', value: 'black' }) // declaration - * rule.append({ text: 'Comment' }) // comment - * - * root.append('a {}') - * root.first.append('color: black; z-index: 1') - * ``` - * - * @param nodes New nodes. - * @return This node for methods chain. - */ - append(...nodes: Container.NewChild[]): this - assign(overrides: Container.ContainerProps | object): this - clone(overrides?: Partial): this - cloneAfter(overrides?: Partial): this - - cloneBefore(overrides?: Partial): this - - /** - * Iterates through the container’s immediate children, - * calling `callback` for each child. - * - * Returning `false` in the callback will break iteration. - * - * This method only iterates through the container’s immediate children. - * If you need to recursively iterate through all the container’s descendant - * nodes, use `Container#walk`. - * - * Unlike the for `{}`-cycle or `Array#forEach` this iterator is safe - * if you are mutating the array of child nodes during iteration. - * PostCSS will adjust the current index to match the mutations. - * - * ```js - * const root = postcss.parse('a { color: black; z-index: 1 }') - * const rule = root.first - * - * for (const decl of rule.nodes) { - * decl.cloneBefore({ prop: '-webkit-' + decl.prop }) - * // Cycle will be infinite, because cloneBefore moves the current node - * // to the next index - * } - * - * rule.each(decl => { - * decl.cloneBefore({ prop: '-webkit-' + decl.prop }) - * // Will be executed only for color and z-index - * }) - * ``` - * - * @param callback Iterator receives each node and index. - * @return Returns `false` if iteration was broke. - */ - each( - callback: (node: Child, index: number) => false | void - ): false | undefined - /** - * Returns `true` if callback returns `true` - * for all of the container’s children. - * - * ```js - * const noPrefixes = rule.every(i => i.prop[0] !== '-') - * ``` - * - * @param condition Iterator returns true or false. - * @return Is every child pass condition. - */ - every( - condition: (node: Child, index: number, nodes: Child[]) => boolean - ): boolean - - /** - * Returns a `child`’s index within the `Container#nodes` array. - * - * ```js - * rule.index( rule.nodes[2] ) //=> 2 - * ``` - * - * @param child Child of the current container. - * @return Child index. - */ - index(child: Child | number): number - /** - * Insert new node after old node within the container. - * - * @param oldNode Child or child’s index. - * @param newNode New node. - * @return This node for methods chain. - */ - insertAfter(oldNode: Child | number, newNode: Container.NewChild): this - - /** - * Insert new node before old node within the container. - * - * ```js - * rule.insertBefore(decl, decl.clone({ prop: '-webkit-' + decl.prop })) - * ``` - * - * @param oldNode Child or child’s index. - * @param newNode New node. - * @return This node for methods chain. - */ - insertBefore(oldNode: Child | number, newNode: Container.NewChild): this - - /** - * Traverses the container’s descendant nodes, calling callback - * for each comment node. - * - * Like `Container#each`, this method is safe - * to use if you are mutating arrays during iteration. 
- * - * ```js - * root.walkComments(comment => { - * comment.remove() - * }) - * ``` - * - * @param callback Iterator receives each node and index. - * @return Returns `false` if iteration was broke. - */ - - /** - * Inserts new nodes to the start of the container. - * - * ```js - * const decl1 = new Declaration({ prop: 'color', value: 'black' }) - * const decl2 = new Declaration({ prop: 'background-color', value: 'white' }) - * rule.prepend(decl1, decl2) - * - * root.append({ name: 'charset', params: '"UTF-8"' }) // at-rule - * root.append({ selector: 'a' }) // rule - * rule.append({ prop: 'color', value: 'black' }) // declaration - * rule.append({ text: 'Comment' }) // comment - * - * root.append('a {}') - * root.first.append('color: black; z-index: 1') - * ``` - * - * @param nodes New nodes. - * @return This node for methods chain. - */ - prepend(...nodes: Container.NewChild[]): this - /** - * Add child to the end of the node. - * - * ```js - * rule.push(new Declaration({ prop: 'color', value: 'black' })) - * ``` - * - * @param child New node. - * @return This node for methods chain. - */ - push(child: Child): this - - /** - * Removes all children from the container - * and cleans their parent properties. - * - * ```js - * rule.removeAll() - * rule.nodes.length //=> 0 - * ``` - * - * @return This node for methods chain. - */ - removeAll(): this - - /** - * Removes node from the container and cleans the parent properties - * from the node and its children. - * - * ```js - * rule.nodes.length //=> 5 - * rule.removeChild(decl) - * rule.nodes.length //=> 4 - * decl.parent //=> undefined - * ``` - * - * @param child Child or child’s index. - * @return This node for methods chain. - */ - removeChild(child: Child | number): this - - replaceValues( - pattern: RegExp | string, - replaced: { (substring: string, ...args: any[]): string } | string - ): this - - /** - * Passes all declaration values within the container that match pattern - * through callback, replacing those values with the returned result - * of callback. - * - * This method is useful if you are using a custom unit or function - * and need to iterate through all values. - * - * ```js - * root.replaceValues(/\d+rem/, { fast: 'rem' }, string => { - * return 15 * parseInt(string) + 'px' - * }) - * ``` - * - * @param pattern Replace pattern. - * @param {object} options Options to speed up the search. - * @param replaced String to replace pattern or callback - * that returns a new value. The callback - * will receive the same arguments - * as those passed to a function parameter - * of `String#replace`. - * @return This node for methods chain. - */ - replaceValues( - pattern: RegExp | string, - options: Container.ValueOptions, - replaced: { (substring: string, ...args: any[]): string } | string - ): this - - /** - * Returns `true` if callback returns `true` for (at least) one - * of the container’s children. - * - * ```js - * const hasPrefix = rule.some(i => i.prop[0] === '-') - * ``` - * - * @param condition Iterator returns true or false. - * @return Is some child pass condition. - */ - some( - condition: (node: Child, index: number, nodes: Child[]) => boolean - ): boolean - - /** - * Traverses the container’s descendant nodes, calling callback - * for each node. - * - * Like container.each(), this method is safe to use - * if you are mutating arrays during iteration. - * - * If you only need to iterate through the container’s immediate children, - * use `Container#each`. 
- * - * ```js - * root.walk(node => { - * // Traverses all descendant nodes. - * }) - * ``` - * - * @param callback Iterator receives each node and index. - * @return Returns `false` if iteration was broke. - */ - walk( - callback: (node: ChildNode, index: number) => false | void - ): false | undefined - - /** - * Traverses the container’s descendant nodes, calling callback - * for each at-rule node. - * - * If you pass a filter, iteration will only happen over at-rules - * that have matching names. - * - * Like `Container#each`, this method is safe - * to use if you are mutating arrays during iteration. - * - * ```js - * root.walkAtRules(rule => { - * if (isOld(rule.name)) rule.remove() - * }) - * - * let first = false - * root.walkAtRules('charset', rule => { - * if (!first) { - * first = true - * } else { - * rule.remove() - * } - * }) - * ``` - * - * @param name String or regular expression to filter at-rules by name. - * @param callback Iterator receives each node and index. - * @return Returns `false` if iteration was broke. - */ - walkAtRules( - nameFilter: RegExp | string, - callback: (atRule: AtRule, index: number) => false | void - ): false | undefined - - walkAtRules( - callback: (atRule: AtRule, index: number) => false | void - ): false | undefined - walkComments( - callback: (comment: Comment, indexed: number) => false | void - ): false | undefined - - walkComments( - callback: (comment: Comment, indexed: number) => false | void - ): false | undefined - - /** - * Traverses the container’s descendant nodes, calling callback - * for each declaration node. - * - * If you pass a filter, iteration will only happen over declarations - * with matching properties. - * - * ```js - * root.walkDecls(decl => { - * checkPropertySupport(decl.prop) - * }) - * - * root.walkDecls('border-radius', decl => { - * decl.remove() - * }) - * - * root.walkDecls(/^background/, decl => { - * decl.value = takeFirstColorFromGradient(decl.value) - * }) - * ``` - * - * Like `Container#each`, this method is safe - * to use if you are mutating arrays during iteration. - * - * @param prop String or regular expression to filter declarations - * by property name. - * @param callback Iterator receives each node and index. - * @return Returns `false` if iteration was broke. - */ - walkDecls( - propFilter: RegExp | string, - callback: (decl: Declaration, index: number) => false | void - ): false | undefined - - walkDecls( - callback: (decl: Declaration, index: number) => false | void - ): false | undefined - - /** - * Traverses the container’s descendant nodes, calling callback - * for each rule node. - * - * If you pass a filter, iteration will only happen over rules - * with matching selectors. - * - * Like `Container#each`, this method is safe - * to use if you are mutating arrays during iteration. - * - * ```js - * const selectors = [] - * root.walkRules(rule => { - * selectors.push(rule.selector) - * }) - * console.log(`Your CSS uses ${ selectors.length } selectors`) - * ``` - * - * @param selector String or regular expression to filter rules by selector. - * @param callback Iterator receives each node and index. - * @return Returns `false` if iteration was broke. - */ - walkRules( - selectorFilter: RegExp | string, - callback: (rule: Rule, index: number) => false | void - ): false | undefined - walkRules( - callback: (rule: Rule, index: number) => false | void - ): false | undefined - /** - * The container’s first child. 
- * - * ```js - * rule.first === rules.nodes[0] - * ``` - */ - get first(): Child | undefined - /** - * The container’s last child. - * - * ```js - * rule.last === rule.nodes[rule.nodes.length - 1] - * ``` - */ - get last(): Child | undefined -} - -declare class Container< - Child extends Node = ChildNode -> extends Container_ {} - -export = Container diff --git a/node_modules/postcss/lib/container.js b/node_modules/postcss/lib/container.js deleted file mode 100644 index 8b9e1ef..0000000 --- a/node_modules/postcss/lib/container.js +++ /dev/null @@ -1,447 +0,0 @@ -'use strict' - -let Comment = require('./comment') -let Declaration = require('./declaration') -let Node = require('./node') -let { isClean, my } = require('./symbols') - -let AtRule, parse, Root, Rule - -function cleanSource(nodes) { - return nodes.map(i => { - if (i.nodes) i.nodes = cleanSource(i.nodes) - delete i.source - return i - }) -} - -function markTreeDirty(node) { - node[isClean] = false - if (node.proxyOf.nodes) { - for (let i of node.proxyOf.nodes) { - markTreeDirty(i) - } - } -} - -class Container extends Node { - append(...children) { - for (let child of children) { - let nodes = this.normalize(child, this.last) - for (let node of nodes) this.proxyOf.nodes.push(node) - } - - this.markDirty() - - return this - } - - cleanRaws(keepBetween) { - super.cleanRaws(keepBetween) - if (this.nodes) { - for (let node of this.nodes) node.cleanRaws(keepBetween) - } - } - - each(callback) { - if (!this.proxyOf.nodes) return undefined - let iterator = this.getIterator() - - let index, result - while (this.indexes[iterator] < this.proxyOf.nodes.length) { - index = this.indexes[iterator] - result = callback(this.proxyOf.nodes[index], index) - if (result === false) break - - this.indexes[iterator] += 1 - } - - delete this.indexes[iterator] - return result - } - - every(condition) { - return this.nodes.every(condition) - } - - getIterator() { - if (!this.lastEach) this.lastEach = 0 - if (!this.indexes) this.indexes = {} - - this.lastEach += 1 - let iterator = this.lastEach - this.indexes[iterator] = 0 - - return iterator - } - - getProxyProcessor() { - return { - get(node, prop) { - if (prop === 'proxyOf') { - return node - } else if (!node[prop]) { - return node[prop] - } else if ( - prop === 'each' || - (typeof prop === 'string' && prop.startsWith('walk')) - ) { - return (...args) => { - return node[prop]( - ...args.map(i => { - if (typeof i === 'function') { - return (child, index) => i(child.toProxy(), index) - } else { - return i - } - }) - ) - } - } else if (prop === 'every' || prop === 'some') { - return cb => { - return node[prop]((child, ...other) => - cb(child.toProxy(), ...other) - ) - } - } else if (prop === 'root') { - return () => node.root().toProxy() - } else if (prop === 'nodes') { - return node.nodes.map(i => i.toProxy()) - } else if (prop === 'first' || prop === 'last') { - return node[prop].toProxy() - } else { - return node[prop] - } - }, - - set(node, prop, value) { - if (node[prop] === value) return true - node[prop] = value - if (prop === 'name' || prop === 'params' || prop === 'selector') { - node.markDirty() - } - return true - } - } - } - - index(child) { - if (typeof child === 'number') return child - if (child.proxyOf) child = child.proxyOf - return this.proxyOf.nodes.indexOf(child) - } - - insertAfter(exist, add) { - let existIndex = this.index(exist) - let nodes = this.normalize(add, this.proxyOf.nodes[existIndex]).reverse() - existIndex = this.index(exist) - for (let node of nodes) 
this.proxyOf.nodes.splice(existIndex + 1, 0, node) - - let index - for (let id in this.indexes) { - index = this.indexes[id] - if (existIndex < index) { - this.indexes[id] = index + nodes.length - } - } - - this.markDirty() - - return this - } - - insertBefore(exist, add) { - let existIndex = this.index(exist) - let type = existIndex === 0 ? 'prepend' : false - let nodes = this.normalize( - add, - this.proxyOf.nodes[existIndex], - type - ).reverse() - existIndex = this.index(exist) - for (let node of nodes) this.proxyOf.nodes.splice(existIndex, 0, node) - - let index - for (let id in this.indexes) { - index = this.indexes[id] - if (existIndex <= index) { - this.indexes[id] = index + nodes.length - } - } - - this.markDirty() - - return this - } - - normalize(nodes, sample) { - if (typeof nodes === 'string') { - nodes = cleanSource(parse(nodes).nodes) - } else if (typeof nodes === 'undefined') { - nodes = [] - } else if (Array.isArray(nodes)) { - nodes = nodes.slice(0) - for (let i of nodes) { - if (i.parent) i.parent.removeChild(i, 'ignore') - } - } else if (nodes.type === 'root' && this.type !== 'document') { - nodes = nodes.nodes.slice(0) - for (let i of nodes) { - if (i.parent) i.parent.removeChild(i, 'ignore') - } - } else if (nodes.type) { - nodes = [nodes] - } else if (nodes.prop) { - if (typeof nodes.value === 'undefined') { - throw new Error('Value field is missed in node creation') - } else if (typeof nodes.value !== 'string') { - nodes.value = String(nodes.value) - } - nodes = [new Declaration(nodes)] - } else if (nodes.selector || nodes.selectors) { - nodes = [new Rule(nodes)] - } else if (nodes.name) { - nodes = [new AtRule(nodes)] - } else if (nodes.text) { - nodes = [new Comment(nodes)] - } else { - throw new Error('Unknown node type in node creation') - } - - let processed = nodes.map(i => { - /* c8 ignore next */ - if (!i[my]) Container.rebuild(i) - i = i.proxyOf - if (i.parent) i.parent.removeChild(i) - if (i[isClean]) markTreeDirty(i) - - if (!i.raws) i.raws = {} - if (typeof i.raws.before === 'undefined') { - if (sample && typeof sample.raws.before !== 'undefined') { - i.raws.before = sample.raws.before.replace(/\S/g, '') - } - } - i.parent = this.proxyOf - return i - }) - - return processed - } - - prepend(...children) { - children = children.reverse() - for (let child of children) { - let nodes = this.normalize(child, this.first, 'prepend').reverse() - for (let node of nodes) this.proxyOf.nodes.unshift(node) - for (let id in this.indexes) { - this.indexes[id] = this.indexes[id] + nodes.length - } - } - - this.markDirty() - - return this - } - - push(child) { - child.parent = this - this.proxyOf.nodes.push(child) - return this - } - - removeAll() { - for (let node of this.proxyOf.nodes) node.parent = undefined - this.proxyOf.nodes = [] - - this.markDirty() - - return this - } - - removeChild(child) { - child = this.index(child) - this.proxyOf.nodes[child].parent = undefined - this.proxyOf.nodes.splice(child, 1) - - let index - for (let id in this.indexes) { - index = this.indexes[id] - if (index >= child) { - this.indexes[id] = index - 1 - } - } - - this.markDirty() - - return this - } - - replaceValues(pattern, opts, callback) { - if (!callback) { - callback = opts - opts = {} - } - - this.walkDecls(decl => { - if (opts.props && !opts.props.includes(decl.prop)) return - if (opts.fast && !decl.value.includes(opts.fast)) return - - decl.value = decl.value.replace(pattern, callback) - }) - - this.markDirty() - - return this - } - - some(condition) { - return 
this.nodes.some(condition) - } - - walk(callback) { - return this.each((child, i) => { - let result - try { - result = callback(child, i) - } catch (e) { - throw child.addToError(e) - } - if (result !== false && child.walk) { - result = child.walk(callback) - } - - return result - }) - } - - walkAtRules(name, callback) { - if (!callback) { - callback = name - return this.walk((child, i) => { - if (child.type === 'atrule') { - return callback(child, i) - } - }) - } - if (name instanceof RegExp) { - return this.walk((child, i) => { - if (child.type === 'atrule' && name.test(child.name)) { - return callback(child, i) - } - }) - } - return this.walk((child, i) => { - if (child.type === 'atrule' && child.name === name) { - return callback(child, i) - } - }) - } - - walkComments(callback) { - return this.walk((child, i) => { - if (child.type === 'comment') { - return callback(child, i) - } - }) - } - - walkDecls(prop, callback) { - if (!callback) { - callback = prop - return this.walk((child, i) => { - if (child.type === 'decl') { - return callback(child, i) - } - }) - } - if (prop instanceof RegExp) { - return this.walk((child, i) => { - if (child.type === 'decl' && prop.test(child.prop)) { - return callback(child, i) - } - }) - } - return this.walk((child, i) => { - if (child.type === 'decl' && child.prop === prop) { - return callback(child, i) - } - }) - } - - walkRules(selector, callback) { - if (!callback) { - callback = selector - - return this.walk((child, i) => { - if (child.type === 'rule') { - return callback(child, i) - } - }) - } - if (selector instanceof RegExp) { - return this.walk((child, i) => { - if (child.type === 'rule' && selector.test(child.selector)) { - return callback(child, i) - } - }) - } - return this.walk((child, i) => { - if (child.type === 'rule' && child.selector === selector) { - return callback(child, i) - } - }) - } - - get first() { - if (!this.proxyOf.nodes) return undefined - return this.proxyOf.nodes[0] - } - - get last() { - if (!this.proxyOf.nodes) return undefined - return this.proxyOf.nodes[this.proxyOf.nodes.length - 1] - } -} - -Container.registerParse = dependant => { - parse = dependant -} - -Container.registerRule = dependant => { - Rule = dependant -} - -Container.registerAtRule = dependant => { - AtRule = dependant -} - -Container.registerRoot = dependant => { - Root = dependant -} - -module.exports = Container -Container.default = Container - -/* c8 ignore start */ -Container.rebuild = node => { - if (node.type === 'atrule') { - Object.setPrototypeOf(node, AtRule.prototype) - } else if (node.type === 'rule') { - Object.setPrototypeOf(node, Rule.prototype) - } else if (node.type === 'decl') { - Object.setPrototypeOf(node, Declaration.prototype) - } else if (node.type === 'comment') { - Object.setPrototypeOf(node, Comment.prototype) - } else if (node.type === 'root') { - Object.setPrototypeOf(node, Root.prototype) - } - - node[my] = true - - if (node.nodes) { - node.nodes.forEach(child => { - Container.rebuild(child) - }) - } -} -/* c8 ignore stop */ diff --git a/node_modules/postcss/lib/css-syntax-error.d.ts b/node_modules/postcss/lib/css-syntax-error.d.ts deleted file mode 100644 index e540d84..0000000 --- a/node_modules/postcss/lib/css-syntax-error.d.ts +++ /dev/null @@ -1,248 +0,0 @@ -import { FilePosition } from './input.js' - -declare namespace CssSyntaxError { - /** - * A position that is part of a range. - */ - export interface RangePosition { - /** - * The column number in the input. 
- */ - column: number - - /** - * The line number in the input. - */ - line: number - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { CssSyntaxError_ as default } -} - -/** - * The CSS parser throws this error for broken CSS. - * - * Custom parsers can throw this error for broken custom syntax using - * the `Node#error` method. - * - * PostCSS will use the input source map to detect the original error location. - * If you wrote a Sass file, compiled it to CSS and then parsed it with PostCSS, - * PostCSS will show the original position in the Sass file. - * - * If you need the position in the PostCSS input - * (e.g., to debug the previous compiler), use `error.input.file`. - * - * ```js - * // Raising error from plugin - * throw node.error('Unknown variable', { plugin: 'postcss-vars' }) - * ``` - * - * ```js - * // Catching and checking syntax error - * try { - * postcss.parse('a{') - * } catch (error) { - * if (error.name === 'CssSyntaxError') { - * error //=> CssSyntaxError - * } - * } - * ``` - */ -declare class CssSyntaxError_ extends Error { - /** - * Source column of the error. - * - * ```js - * error.column //=> 1 - * error.input.column //=> 4 - * ``` - * - * PostCSS will use the input source map to detect the original location. - * If you need the position in the PostCSS input, use `error.input.column`. - */ - column?: number - - /** - * Source column of the error's end, exclusive. Provided if the error pertains - * to a range. - * - * ```js - * error.endColumn //=> 1 - * error.input.endColumn //=> 4 - * ``` - * - * PostCSS will use the input source map to detect the original location. - * If you need the position in the PostCSS input, use `error.input.endColumn`. - */ - endColumn?: number - - /** - * Source line of the error's end, exclusive. Provided if the error pertains - * to a range. - * - * ```js - * error.endLine //=> 3 - * error.input.endLine //=> 4 - * ``` - * - * PostCSS will use the input source map to detect the original location. - * If you need the position in the PostCSS input, use `error.input.endLine`. - */ - endLine?: number - - /** - * Absolute path to the broken file. - * - * ```js - * error.file //=> 'a.sass' - * error.input.file //=> 'a.css' - * ``` - * - * PostCSS will use the input source map to detect the original location. - * If you need the position in the PostCSS input, use `error.input.file`. - */ - file?: string - - /** - * Input object with PostCSS internal information - * about input file. If input has source map - * from previous tool, PostCSS will use origin - * (for example, Sass) source. You can use this - * object to get PostCSS input source. - * - * ```js - * error.input.file //=> 'a.css' - * error.file //=> 'a.sass' - * ``` - */ - input?: FilePosition - - /** - * Source line of the error. - * - * ```js - * error.line //=> 2 - * error.input.line //=> 4 - * ``` - * - * PostCSS will use the input source map to detect the original location. - * If you need the position in the PostCSS input, use `error.input.line`. - */ - line?: number - - /** - * Full error text in the GNU error format - * with plugin, file, line and column. - * - * ```js - * error.message //=> 'a.css:1:1: Unclosed block' - * ``` - */ - message: string - - /** - * Always equal to `'CssSyntaxError'`. You should always check error type - * by `error.name === 'CssSyntaxError'` - * instead of `error instanceof CssSyntaxError`, - * because npm could have several PostCSS versions. 
- * - * ```js - * if (error.name === 'CssSyntaxError') { - * error //=> CssSyntaxError - * } - * ``` - */ - name: 'CssSyntaxError' - - /** - * Plugin name, if error came from plugin. - * - * ```js - * error.plugin //=> 'postcss-vars' - * ``` - */ - plugin?: string - - /** - * Error message. - * - * ```js - * error.message //=> 'Unclosed block' - * ``` - */ - reason: string - - /** - * Source code of the broken file. - * - * ```js - * error.source //=> 'a { b {} }' - * error.input.source //=> 'a b { }' - * ``` - */ - source?: string - - stack: string - - /** - * Instantiates a CSS syntax error. Can be instantiated for a single position - * or for a range. - * @param message Error message. - * @param lineOrStartPos If for a single position, the line number, or if for - * a range, the inclusive start position of the error. - * @param columnOrEndPos If for a single position, the column number, or if for - * a range, the exclusive end position of the error. - * @param source Source code of the broken file. - * @param file Absolute path to the broken file. - * @param plugin PostCSS plugin name, if error came from plugin. - */ - constructor( - message: string, - lineOrStartPos?: CssSyntaxError.RangePosition | number, - columnOrEndPos?: CssSyntaxError.RangePosition | number, - source?: string, - file?: string, - plugin?: string - ) - - /** - * Returns a few lines of CSS source that caused the error. - * - * If the CSS has an input source map without `sourceContent`, - * this method will return an empty string. - * - * ```js - * error.showSourceCode() //=> " 4 | } - * // 5 | a { - * // > 6 | bad - * // | ^ - * // 7 | } - * // 8 | b {" - * ``` - * - * @param color Whether arrow will be colored red by terminal - * color codes. By default, PostCSS will detect - * color support by `process.stdout.isTTY` - * and `process.env.NODE_DISABLE_COLORS`. - * @return Few lines of CSS source that caused the error. - */ - showSourceCode(color?: boolean): string - - /** - * Returns error position, message and source code of the broken part. - * - * ```js - * error.toString() //=> "CssSyntaxError: app.css:1:1: Unclosed block - * // > 1 | a { - * // | ^" - * ``` - * - * @return Error position, message and source code. - */ - toString(): string -} - -declare class CssSyntaxError extends CssSyntaxError_ {} - -export = CssSyntaxError diff --git a/node_modules/postcss/lib/css-syntax-error.js b/node_modules/postcss/lib/css-syntax-error.js deleted file mode 100644 index 275a4f6..0000000 --- a/node_modules/postcss/lib/css-syntax-error.js +++ /dev/null @@ -1,133 +0,0 @@ -'use strict' - -let pico = require('picocolors') - -let terminalHighlight = require('./terminal-highlight') - -class CssSyntaxError extends Error { - constructor(message, line, column, source, file, plugin) { - super(message) - this.name = 'CssSyntaxError' - this.reason = message - - if (file) { - this.file = file - } - if (source) { - this.source = source - } - if (plugin) { - this.plugin = plugin - } - if (typeof line !== 'undefined' && typeof column !== 'undefined') { - if (typeof line === 'number') { - this.line = line - this.column = column - } else { - this.line = line.line - this.column = line.column - this.endLine = column.line - this.endColumn = column.column - } - } - - this.setMessage() - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, CssSyntaxError) - } - } - - setMessage() { - this.message = this.plugin ? this.plugin + ': ' : '' - this.message += this.file ? 
this.file : '' - if (typeof this.line !== 'undefined') { - this.message += ':' + this.line + ':' + this.column - } - this.message += ': ' + this.reason - } - - showSourceCode(color) { - if (!this.source) return '' - - let css = this.source - if (color == null) color = pico.isColorSupported - - let aside = text => text - let mark = text => text - let highlight = text => text - if (color) { - let { bold, gray, red } = pico.createColors(true) - mark = text => bold(red(text)) - aside = text => gray(text) - if (terminalHighlight) { - highlight = text => terminalHighlight(text) - } - } - - let lines = css.split(/\r?\n/) - let start = Math.max(this.line - 3, 0) - let end = Math.min(this.line + 2, lines.length) - let maxWidth = String(end).length - - return lines - .slice(start, end) - .map((line, index) => { - let number = start + 1 + index - let gutter = ' ' + (' ' + number).slice(-maxWidth) + ' | ' - if (number === this.line) { - if (line.length > 160) { - let padding = 20 - let subLineStart = Math.max(0, this.column - padding) - let subLineEnd = Math.max( - this.column + padding, - this.endColumn + padding - ) - let subLine = line.slice(subLineStart, subLineEnd) - - let spacing = - aside(gutter.replace(/\d/g, ' ')) + - line - .slice(0, Math.min(this.column - 1, padding - 1)) - .replace(/[^\t]/g, ' ') - - return ( - mark('>') + - aside(gutter) + - highlight(subLine) + - '\n ' + - spacing + - mark('^') - ) - } - - let spacing = - aside(gutter.replace(/\d/g, ' ')) + - line.slice(0, this.column - 1).replace(/[^\t]/g, ' ') - - return ( - mark('>') + - aside(gutter) + - highlight(line) + - '\n ' + - spacing + - mark('^') - ) - } - - return ' ' + aside(gutter) + highlight(line) - }) - .join('\n') - } - - toString() { - let code = this.showSourceCode() - if (code) { - code = '\n\n' + code + '\n' - } - return this.name + ': ' + this.message + code - } -} - -module.exports = CssSyntaxError -CssSyntaxError.default = CssSyntaxError diff --git a/node_modules/postcss/lib/declaration.d.ts b/node_modules/postcss/lib/declaration.d.ts deleted file mode 100644 index e707ad6..0000000 --- a/node_modules/postcss/lib/declaration.d.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { ContainerWithChildren } from './container.js' -import Node from './node.js' - -declare namespace Declaration { - export interface DeclarationRaws extends Record { - /** - * The space symbols before the node. It also stores `*` - * and `_` symbols before the declaration (IE hack). - */ - before?: string - - /** - * The symbols between the property and value for declarations. - */ - between?: string - - /** - * The content of the important statement, if it is not just `!important`. - */ - important?: string - - /** - * Declaration value with comments. - */ - value?: { - raw: string - value: string - } - } - - export interface DeclarationProps { - /** Whether the declaration has an `!important` annotation. */ - important?: boolean - /** Name of the declaration. */ - prop: string - /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ - raws?: DeclarationRaws - /** Value of the declaration. 
*/ - value: string - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Declaration_ as default } -} - -/** - * It represents a class that handles - * [CSS declarations](https://developer.mozilla.org/en-US/docs/Web/CSS/Syntax#css_declarations) - * - * ```js - * Once (root, { Declaration }) { - * const color = new Declaration({ prop: 'color', value: 'black' }) - * root.append(color) - * } - * ``` - * - * ```js - * const root = postcss.parse('a { color: black }') - * const decl = root.first?.first - * - * decl.type //=> 'decl' - * decl.toString() //=> ' color: black' - * ``` - */ -declare class Declaration_ extends Node { - parent: ContainerWithChildren | undefined - raws: Declaration.DeclarationRaws - - type: 'decl' - - constructor(defaults?: Declaration.DeclarationProps) - assign(overrides: Declaration.DeclarationProps | object): this - - clone(overrides?: Partial): this - - cloneAfter(overrides?: Partial): this - - cloneBefore(overrides?: Partial): this - /** - * It represents a specificity of the declaration. - * - * If true, the CSS declaration will have an - * [important](https://developer.mozilla.org/en-US/docs/Web/CSS/important) - * specifier. - * - * ```js - * const root = postcss.parse('a { color: black !important; color: red }') - * - * root.first.first.important //=> true - * root.first.last.important //=> undefined - * ``` - */ - get important(): boolean - - set important(value: boolean) - /** - * The property name for a CSS declaration. - * - * ```js - * const root = postcss.parse('a { color: black }') - * const decl = root.first.first - * - * decl.prop //=> 'color' - * ``` - */ - get prop(): string - - set prop(value: string) - /** - * The property value for a CSS declaration. - * - * Any CSS comments inside the value string will be filtered out. - * CSS comments present in the source value will be available in - * the `raws` property. - * - * Assigning new `value` would ignore the comments in `raws` - * property while compiling node to string. - * - * ```js - * const root = postcss.parse('a { color: black }') - * const decl = root.first.first - * - * decl.value //=> 'black' - * ``` - */ - get value(): string - set value(value: string) - /** - * It represents a getter that returns `true` if a declaration starts with - * `--` or `$`, which are used to declare variables in CSS and SASS/SCSS. 
- * - * ```js - * const root = postcss.parse(':root { --one: 1 }') - * const one = root.first.first - * - * one.variable //=> true - * ``` - * - * ```js - * const root = postcss.parse('$one: 1') - * const one = root.first - * - * one.variable //=> true - * ``` - */ - get variable(): boolean -} - -declare class Declaration extends Declaration_ {} - -export = Declaration diff --git a/node_modules/postcss/lib/declaration.js b/node_modules/postcss/lib/declaration.js deleted file mode 100644 index a04bdec..0000000 --- a/node_modules/postcss/lib/declaration.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' - -let Node = require('./node') - -class Declaration extends Node { - constructor(defaults) { - if ( - defaults && - typeof defaults.value !== 'undefined' && - typeof defaults.value !== 'string' - ) { - defaults = { ...defaults, value: String(defaults.value) } - } - super(defaults) - this.type = 'decl' - } - - get variable() { - return this.prop.startsWith('--') || this.prop[0] === '$' - } -} - -module.exports = Declaration -Declaration.default = Declaration diff --git a/node_modules/postcss/lib/document.d.ts b/node_modules/postcss/lib/document.d.ts deleted file mode 100644 index f9e8063..0000000 --- a/node_modules/postcss/lib/document.d.ts +++ /dev/null @@ -1,69 +0,0 @@ -import Container, { ContainerProps } from './container.js' -import { ProcessOptions } from './postcss.js' -import Result from './result.js' -import Root from './root.js' - -declare namespace Document { - export interface DocumentProps extends ContainerProps { - nodes?: readonly Root[] - - /** - * Information to generate byte-to-byte equal node string as it was - * in the origin input. - * - * Every parser saves its own properties. - */ - raws?: Record - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Document_ as default } -} - -/** - * Represents a file and contains all its parsed nodes. - * - * **Experimental:** some aspects of this node could change within minor - * or patch version releases. - * - * ```js - * const document = htmlParser( - * '' - * ) - * document.type //=> 'document' - * document.nodes.length //=> 2 - * ``` - */ -declare class Document_ extends Container { - nodes: Root[] - parent: undefined - type: 'document' - - constructor(defaults?: Document.DocumentProps) - - assign(overrides: Document.DocumentProps | object): this - clone(overrides?: Partial): this - cloneAfter(overrides?: Partial): this - cloneBefore(overrides?: Partial): this - - /** - * Returns a `Result` instance representing the document’s CSS roots. - * - * ```js - * const root1 = postcss.parse(css1, { from: 'a.css' }) - * const root2 = postcss.parse(css2, { from: 'b.css' }) - * const document = postcss.document() - * document.append(root1) - * document.append(root2) - * const result = document.toResult({ to: 'all.css', map: true }) - * ``` - * - * @param opts Options. - * @return Result with current document’s CSS. 
- */ - toResult(options?: ProcessOptions): Result -} - -declare class Document extends Document_ {} - -export = Document diff --git a/node_modules/postcss/lib/document.js b/node_modules/postcss/lib/document.js deleted file mode 100644 index 4468991..0000000 --- a/node_modules/postcss/lib/document.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -let Container = require('./container') - -let LazyResult, Processor - -class Document extends Container { - constructor(defaults) { - // type needs to be passed to super, otherwise child roots won't be normalized correctly - super({ type: 'document', ...defaults }) - - if (!this.nodes) { - this.nodes = [] - } - } - - toResult(opts = {}) { - let lazy = new LazyResult(new Processor(), this, opts) - - return lazy.stringify() - } -} - -Document.registerLazyResult = dependant => { - LazyResult = dependant -} - -Document.registerProcessor = dependant => { - Processor = dependant -} - -module.exports = Document -Document.default = Document diff --git a/node_modules/postcss/lib/fromJSON.d.ts b/node_modules/postcss/lib/fromJSON.d.ts deleted file mode 100644 index e1deedb..0000000 --- a/node_modules/postcss/lib/fromJSON.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { JSONHydrator } from './postcss.js' - -interface FromJSON extends JSONHydrator { - default: FromJSON -} - -declare const fromJSON: FromJSON - -export = fromJSON diff --git a/node_modules/postcss/lib/fromJSON.js b/node_modules/postcss/lib/fromJSON.js deleted file mode 100644 index c9ac1a8..0000000 --- a/node_modules/postcss/lib/fromJSON.js +++ /dev/null @@ -1,54 +0,0 @@ -'use strict' - -let AtRule = require('./at-rule') -let Comment = require('./comment') -let Declaration = require('./declaration') -let Input = require('./input') -let PreviousMap = require('./previous-map') -let Root = require('./root') -let Rule = require('./rule') - -function fromJSON(json, inputs) { - if (Array.isArray(json)) return json.map(n => fromJSON(n)) - - let { inputs: ownInputs, ...defaults } = json - if (ownInputs) { - inputs = [] - for (let input of ownInputs) { - let inputHydrated = { ...input, __proto__: Input.prototype } - if (inputHydrated.map) { - inputHydrated.map = { - ...inputHydrated.map, - __proto__: PreviousMap.prototype - } - } - inputs.push(inputHydrated) - } - } - if (defaults.nodes) { - defaults.nodes = json.nodes.map(n => fromJSON(n, inputs)) - } - if (defaults.source) { - let { inputId, ...source } = defaults.source - defaults.source = source - if (inputId != null) { - defaults.source.input = inputs[inputId] - } - } - if (defaults.type === 'root') { - return new Root(defaults) - } else if (defaults.type === 'decl') { - return new Declaration(defaults) - } else if (defaults.type === 'rule') { - return new Rule(defaults) - } else if (defaults.type === 'comment') { - return new Comment(defaults) - } else if (defaults.type === 'atrule') { - return new AtRule(defaults) - } else { - throw new Error('Unknown node type: ' + json.type) - } -} - -module.exports = fromJSON -fromJSON.default = fromJSON diff --git a/node_modules/postcss/lib/input.d.ts b/node_modules/postcss/lib/input.d.ts deleted file mode 100644 index 46ded09..0000000 --- a/node_modules/postcss/lib/input.d.ts +++ /dev/null @@ -1,197 +0,0 @@ -import { CssSyntaxError, ProcessOptions } from './postcss.js' -import PreviousMap from './previous-map.js' - -declare namespace Input { - export interface FilePosition { - /** - * Column of inclusive start position in source file. 
- */ - column: number - - /** - * Column of exclusive end position in source file. - */ - endColumn?: number - - /** - * Line of exclusive end position in source file. - */ - endLine?: number - - /** - * Absolute path to the source file. - */ - file?: string - - /** - * Line of inclusive start position in source file. - */ - line: number - - /** - * Source code. - */ - source?: string - - /** - * URL for the source file. - */ - url: string - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Input_ as default } -} - -/** - * Represents the source CSS. - * - * ```js - * const root = postcss.parse(css, { from: file }) - * const input = root.source.input - * ``` - */ -declare class Input_ { - /** - * Input CSS source. - * - * ```js - * const input = postcss.parse('a{}', { from: file }).input - * input.css //=> "a{}" - * ``` - */ - css: string - - /** - * The absolute path to the CSS source file defined - * with the `from` option. - * - * ```js - * const root = postcss.parse(css, { from: 'a.css' }) - * root.source.input.file //=> '/home/ai/a.css' - * ``` - */ - file?: string - - /** - * The flag to indicate whether or not the source code has Unicode BOM. - */ - hasBOM: boolean - - /** - * The unique ID of the CSS source. It will be created if `from` option - * is not provided (because PostCSS does not know the file path). - * - * ```js - * const root = postcss.parse(css) - * root.source.input.file //=> undefined - * root.source.input.id //=> "" - * ``` - */ - id?: string - - /** - * The input source map passed from a compilation step before PostCSS - * (for example, from Sass compiler). - * - * ```js - * root.source.input.map.consumer().sources //=> ['a.sass'] - * ``` - */ - map: PreviousMap - - /** - * @param css Input CSS source. - * @param opts Process options. - */ - constructor(css: string, opts?: ProcessOptions) - - error( - message: string, - start: - | { - column: number - line: number - } - | { - offset: number - }, - end: - | { - column: number - line: number - } - | { - offset: number - }, - opts?: { plugin?: CssSyntaxError['plugin'] } - ): CssSyntaxError - - /** - * Returns `CssSyntaxError` with information about the error and its position. - */ - error( - message: string, - line: number, - column: number, - opts?: { plugin?: CssSyntaxError['plugin'] } - ): CssSyntaxError - - error( - message: string, - offset: number, - opts?: { plugin?: CssSyntaxError['plugin'] } - ): CssSyntaxError - - /** - * Converts source offset to line and column. - * - * @param offset Source offset. - */ - fromOffset(offset: number): { col: number; line: number } | null - /** - * Reads the input source map and returns a symbol position - * in the input source (e.g., in a Sass file that was compiled - * to CSS before being passed to PostCSS). Optionally takes an - * end position, exclusive. - * - * ```js - * root.source.input.origin(1, 1) //=> { file: 'a.css', line: 3, column: 1 } - * root.source.input.origin(1, 1, 1, 4) - * //=> { file: 'a.css', line: 3, column: 1, endLine: 3, endColumn: 4 } - * ``` - * - * @param line Line for inclusive start position in input CSS. - * @param column Column for inclusive start position in input CSS. - * @param endLine Line for exclusive end position in input CSS. - * @param endColumn Column for exclusive end position in input CSS. - * - * @return Position in input source. 
- */ - origin( - line: number, - column: number, - endLine?: number, - endColumn?: number - ): false | Input.FilePosition - /** Converts this to a JSON-friendly object representation. */ - toJSON(): object - - /** - * The CSS source identifier. Contains `Input#file` if the user - * set the `from` option, or `Input#id` if they did not. - * - * ```js - * const root = postcss.parse(css, { from: 'a.css' }) - * root.source.input.from //=> "/home/ai/a.css" - * - * const root = postcss.parse(css) - * root.source.input.from //=> "" - * ``` - */ - get from(): string -} - -declare class Input extends Input_ {} - -export = Input diff --git a/node_modules/postcss/lib/input.js b/node_modules/postcss/lib/input.js deleted file mode 100644 index 685bce7..0000000 --- a/node_modules/postcss/lib/input.js +++ /dev/null @@ -1,248 +0,0 @@ -'use strict' - -let { nanoid } = require('nanoid/non-secure') -let { isAbsolute, resolve } = require('path') -let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') -let { fileURLToPath, pathToFileURL } = require('url') - -let CssSyntaxError = require('./css-syntax-error') -let PreviousMap = require('./previous-map') -let terminalHighlight = require('./terminal-highlight') - -let fromOffsetCache = Symbol('fromOffsetCache') - -let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) -let pathAvailable = Boolean(resolve && isAbsolute) - -class Input { - constructor(css, opts = {}) { - if ( - css === null || - typeof css === 'undefined' || - (typeof css === 'object' && !css.toString) - ) { - throw new Error(`PostCSS received ${css} instead of CSS string`) - } - - this.css = css.toString() - - if (this.css[0] === '\uFEFF' || this.css[0] === '\uFFFE') { - this.hasBOM = true - this.css = this.css.slice(1) - } else { - this.hasBOM = false - } - - if (opts.from) { - if ( - !pathAvailable || - /^\w+:\/\//.test(opts.from) || - isAbsolute(opts.from) - ) { - this.file = opts.from - } else { - this.file = resolve(opts.from) - } - } - - if (pathAvailable && sourceMapAvailable) { - let map = new PreviousMap(this.css, opts) - if (map.text) { - this.map = map - let file = map.consumer().file - if (!this.file && file) this.file = this.mapResolve(file) - } - } - - if (!this.file) { - this.id = '' - } - if (this.map) this.map.file = this.from - } - - error(message, line, column, opts = {}) { - let endColumn, endLine, result - - if (line && typeof line === 'object') { - let start = line - let end = column - if (typeof start.offset === 'number') { - let pos = this.fromOffset(start.offset) - line = pos.line - column = pos.col - } else { - line = start.line - column = start.column - } - if (typeof end.offset === 'number') { - let pos = this.fromOffset(end.offset) - endLine = pos.line - endColumn = pos.col - } else { - endLine = end.line - endColumn = end.column - } - } else if (!column) { - let pos = this.fromOffset(line) - line = pos.line - column = pos.col - } - - let origin = this.origin(line, column, endLine, endColumn) - if (origin) { - result = new CssSyntaxError( - message, - origin.endLine === undefined - ? origin.line - : { column: origin.column, line: origin.line }, - origin.endLine === undefined - ? origin.column - : { column: origin.endColumn, line: origin.endLine }, - origin.source, - origin.file, - opts.plugin - ) - } else { - result = new CssSyntaxError( - message, - endLine === undefined ? line : { column, line }, - endLine === undefined ? 
column : { column: endColumn, line: endLine }, - this.css, - this.file, - opts.plugin - ) - } - - result.input = { column, endColumn, endLine, line, source: this.css } - if (this.file) { - if (pathToFileURL) { - result.input.url = pathToFileURL(this.file).toString() - } - result.input.file = this.file - } - - return result - } - - fromOffset(offset) { - let lastLine, lineToIndex - if (!this[fromOffsetCache]) { - let lines = this.css.split('\n') - lineToIndex = new Array(lines.length) - let prevIndex = 0 - - for (let i = 0, l = lines.length; i < l; i++) { - lineToIndex[i] = prevIndex - prevIndex += lines[i].length + 1 - } - - this[fromOffsetCache] = lineToIndex - } else { - lineToIndex = this[fromOffsetCache] - } - lastLine = lineToIndex[lineToIndex.length - 1] - - let min = 0 - if (offset >= lastLine) { - min = lineToIndex.length - 1 - } else { - let max = lineToIndex.length - 2 - let mid - while (min < max) { - mid = min + ((max - min) >> 1) - if (offset < lineToIndex[mid]) { - max = mid - 1 - } else if (offset >= lineToIndex[mid + 1]) { - min = mid + 1 - } else { - min = mid - break - } - } - } - return { - col: offset - lineToIndex[min] + 1, - line: min + 1 - } - } - - mapResolve(file) { - if (/^\w+:\/\//.test(file)) { - return file - } - return resolve(this.map.consumer().sourceRoot || this.map.root || '.', file) - } - - origin(line, column, endLine, endColumn) { - if (!this.map) return false - let consumer = this.map.consumer() - - let from = consumer.originalPositionFor({ column, line }) - if (!from.source) return false - - let to - if (typeof endLine === 'number') { - to = consumer.originalPositionFor({ column: endColumn, line: endLine }) - } - - let fromUrl - - if (isAbsolute(from.source)) { - fromUrl = pathToFileURL(from.source) - } else { - fromUrl = new URL( - from.source, - this.map.consumer().sourceRoot || pathToFileURL(this.map.mapFile) - ) - } - - let result = { - column: from.column, - endColumn: to && to.column, - endLine: to && to.line, - line: from.line, - url: fromUrl.toString() - } - - if (fromUrl.protocol === 'file:') { - if (fileURLToPath) { - result.file = fileURLToPath(fromUrl) - } else { - /* c8 ignore next 2 */ - throw new Error(`file: protocol is not available in this PostCSS build`) - } - } - - let source = consumer.sourceContentFor(from.source) - if (source) result.source = source - - return result - } - - toJSON() { - let json = {} - for (let name of ['hasBOM', 'css', 'file', 'id']) { - if (this[name] != null) { - json[name] = this[name] - } - } - if (this.map) { - json.map = { ...this.map } - if (json.map.consumerCache) { - json.map.consumerCache = undefined - } - } - return json - } - - get from() { - return this.file || this.id - } -} - -module.exports = Input -Input.default = Input - -if (terminalHighlight && terminalHighlight.registerInput) { - terminalHighlight.registerInput(Input) -} diff --git a/node_modules/postcss/lib/lazy-result.d.ts b/node_modules/postcss/lib/lazy-result.d.ts deleted file mode 100644 index dd291aa..0000000 --- a/node_modules/postcss/lib/lazy-result.d.ts +++ /dev/null @@ -1,190 +0,0 @@ -import Document from './document.js' -import { SourceMap } from './postcss.js' -import Processor from './processor.js' -import Result, { Message, ResultOptions } from './result.js' -import Root from './root.js' -import Warning from './warning.js' - -declare namespace LazyResult { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { LazyResult_ as default } -} - -/** - * A Promise proxy for the result of PostCSS 
transformations. - * - * A `LazyResult` instance is returned by `Processor#process`. - * - * ```js - * const lazy = postcss([autoprefixer]).process(css) - * ``` - */ -declare class LazyResult_ - implements PromiseLike> -{ - /** - * Processes input CSS through synchronous and asynchronous plugins - * and calls onRejected for each error thrown in any plugin. - * - * It implements standard Promise API. - * - * ```js - * postcss([autoprefixer]).process(css).then(result => { - * console.log(result.css) - * }).catch(error => { - * console.error(error) - * }) - * ``` - */ - catch: Promise>['catch'] - - /** - * Processes input CSS through synchronous and asynchronous plugins - * and calls onFinally on any error or when all plugins will finish work. - * - * It implements standard Promise API. - * - * ```js - * postcss([autoprefixer]).process(css).finally(() => { - * console.log('processing ended') - * }) - * ``` - */ - finally: Promise>['finally'] - - /** - * Processes input CSS through synchronous and asynchronous plugins - * and calls `onFulfilled` with a Result instance. If a plugin throws - * an error, the `onRejected` callback will be executed. - * - * It implements standard Promise API. - * - * ```js - * postcss([autoprefixer]).process(css, { from: cssPath }).then(result => { - * console.log(result.css) - * }) - * ``` - */ - then: Promise>['then'] - - /** - * @param processor Processor used for this transformation. - * @param css CSS to parse and transform. - * @param opts Options from the `Processor#process` or `Root#toResult`. - */ - constructor(processor: Processor, css: string, opts: ResultOptions) - - /** - * Run plugin in async way and return `Result`. - * - * @return Result with output content. - */ - async(): Promise> - - /** - * Run plugin in sync way and return `Result`. - * - * @return Result with output content. - */ - sync(): Result - - /** - * Alias for the `LazyResult#css` property. - * - * ```js - * lazy + '' === lazy.css - * ``` - * - * @return Output CSS. - */ - toString(): string - - /** - * Processes input CSS through synchronous plugins - * and calls `Result#warnings`. - * - * @return Warnings from plugins. - */ - warnings(): Warning[] - - /** - * An alias for the `css` property. Use it with syntaxes - * that generate non-CSS output. - * - * This property will only work with synchronous plugins. - * If the processor contains any asynchronous plugins - * it will throw an error. - * - * PostCSS runners should always use `LazyResult#then`. - */ - get content(): string - - /** - * Processes input CSS through synchronous plugins, converts `Root` - * to a CSS string and returns `Result#css`. - * - * This property will only work with synchronous plugins. - * If the processor contains any asynchronous plugins - * it will throw an error. - * - * PostCSS runners should always use `LazyResult#then`. - */ - get css(): string - - /** - * Processes input CSS through synchronous plugins - * and returns `Result#map`. - * - * This property will only work with synchronous plugins. - * If the processor contains any asynchronous plugins - * it will throw an error. - * - * PostCSS runners should always use `LazyResult#then`. - */ - get map(): SourceMap - - /** - * Processes input CSS through synchronous plugins - * and returns `Result#messages`. - * - * This property will only work with synchronous plugins. If the processor - * contains any asynchronous plugins it will throw an error. - * - * PostCSS runners should always use `LazyResult#then`. 
- */ - get messages(): Message[] - - /** - * Options from the `Processor#process` call. - */ - get opts(): ResultOptions - - /** - * Returns a `Processor` instance, which will be used - * for CSS transformations. - */ - get processor(): Processor - - /** - * Processes input CSS through synchronous plugins - * and returns `Result#root`. - * - * This property will only work with synchronous plugins. If the processor - * contains any asynchronous plugins it will throw an error. - * - * PostCSS runners should always use `LazyResult#then`. - */ - get root(): RootNode - - /** - * Returns the default string description of an object. - * Required to implement the Promise interface. - */ - get [Symbol.toStringTag](): string -} - -declare class LazyResult< - RootNode = Document | Root -> extends LazyResult_ {} - -export = LazyResult diff --git a/node_modules/postcss/lib/lazy-result.js b/node_modules/postcss/lib/lazy-result.js deleted file mode 100644 index e27701d..0000000 --- a/node_modules/postcss/lib/lazy-result.js +++ /dev/null @@ -1,550 +0,0 @@ -'use strict' - -let Container = require('./container') -let Document = require('./document') -let MapGenerator = require('./map-generator') -let parse = require('./parse') -let Result = require('./result') -let Root = require('./root') -let stringify = require('./stringify') -let { isClean, my } = require('./symbols') -let warnOnce = require('./warn-once') - -const TYPE_TO_CLASS_NAME = { - atrule: 'AtRule', - comment: 'Comment', - decl: 'Declaration', - document: 'Document', - root: 'Root', - rule: 'Rule' -} - -const PLUGIN_PROPS = { - AtRule: true, - AtRuleExit: true, - Comment: true, - CommentExit: true, - Declaration: true, - DeclarationExit: true, - Document: true, - DocumentExit: true, - Once: true, - OnceExit: true, - postcssPlugin: true, - prepare: true, - Root: true, - RootExit: true, - Rule: true, - RuleExit: true -} - -const NOT_VISITORS = { - Once: true, - postcssPlugin: true, - prepare: true -} - -const CHILDREN = 0 - -function isPromise(obj) { - return typeof obj === 'object' && typeof obj.then === 'function' -} - -function getEvents(node) { - let key = false - let type = TYPE_TO_CLASS_NAME[node.type] - if (node.type === 'decl') { - key = node.prop.toLowerCase() - } else if (node.type === 'atrule') { - key = node.name.toLowerCase() - } - - if (key && node.append) { - return [ - type, - type + '-' + key, - CHILDREN, - type + 'Exit', - type + 'Exit-' + key - ] - } else if (key) { - return [type, type + '-' + key, type + 'Exit', type + 'Exit-' + key] - } else if (node.append) { - return [type, CHILDREN, type + 'Exit'] - } else { - return [type, type + 'Exit'] - } -} - -function toStack(node) { - let events - if (node.type === 'document') { - events = ['Document', CHILDREN, 'DocumentExit'] - } else if (node.type === 'root') { - events = ['Root', CHILDREN, 'RootExit'] - } else { - events = getEvents(node) - } - - return { - eventIndex: 0, - events, - iterator: 0, - node, - visitorIndex: 0, - visitors: [] - } -} - -function cleanMarks(node) { - node[isClean] = false - if (node.nodes) node.nodes.forEach(i => cleanMarks(i)) - return node -} - -let postcss = {} - -class LazyResult { - constructor(processor, css, opts) { - this.stringified = false - this.processed = false - - let root - if ( - typeof css === 'object' && - css !== null && - (css.type === 'root' || css.type === 'document') - ) { - root = cleanMarks(css) - } else if (css instanceof LazyResult || css instanceof Result) { - root = cleanMarks(css.root) - if (css.map) { - if (typeof 
opts.map === 'undefined') opts.map = {} - if (!opts.map.inline) opts.map.inline = false - opts.map.prev = css.map - } - } else { - let parser = parse - if (opts.syntax) parser = opts.syntax.parse - if (opts.parser) parser = opts.parser - if (parser.parse) parser = parser.parse - - try { - root = parser(css, opts) - } catch (error) { - this.processed = true - this.error = error - } - - if (root && !root[my]) { - /* c8 ignore next 2 */ - Container.rebuild(root) - } - } - - this.result = new Result(processor, root, opts) - this.helpers = { ...postcss, postcss, result: this.result } - this.plugins = this.processor.plugins.map(plugin => { - if (typeof plugin === 'object' && plugin.prepare) { - return { ...plugin, ...plugin.prepare(this.result) } - } else { - return plugin - } - }) - } - - async() { - if (this.error) return Promise.reject(this.error) - if (this.processed) return Promise.resolve(this.result) - if (!this.processing) { - this.processing = this.runAsync() - } - return this.processing - } - - catch(onRejected) { - return this.async().catch(onRejected) - } - - finally(onFinally) { - return this.async().then(onFinally, onFinally) - } - - getAsyncError() { - throw new Error('Use process(css).then(cb) to work with async plugins') - } - - handleError(error, node) { - let plugin = this.result.lastPlugin - try { - if (node) node.addToError(error) - this.error = error - if (error.name === 'CssSyntaxError' && !error.plugin) { - error.plugin = plugin.postcssPlugin - error.setMessage() - } else if (plugin.postcssVersion) { - if (process.env.NODE_ENV !== 'production') { - let pluginName = plugin.postcssPlugin - let pluginVer = plugin.postcssVersion - let runtimeVer = this.result.processor.version - let a = pluginVer.split('.') - let b = runtimeVer.split('.') - - if (a[0] !== b[0] || parseInt(a[1]) > parseInt(b[1])) { - // eslint-disable-next-line no-console - console.error( - 'Unknown error from PostCSS plugin. Your current PostCSS ' + - 'version is ' + - runtimeVer + - ', but ' + - pluginName + - ' uses ' + - pluginVer + - '. Perhaps this is the source of the error below.' - ) - } - } - } - } catch (err) { - /* c8 ignore next 3 */ - // eslint-disable-next-line no-console - if (console && console.error) console.error(err) - } - return error - } - - prepareVisitors() { - this.listeners = {} - let add = (plugin, type, cb) => { - if (!this.listeners[type]) this.listeners[type] = [] - this.listeners[type].push([plugin, cb]) - } - for (let plugin of this.plugins) { - if (typeof plugin === 'object') { - for (let event in plugin) { - if (!PLUGIN_PROPS[event] && /^[A-Z]/.test(event)) { - throw new Error( - `Unknown event ${event} in ${plugin.postcssPlugin}. 
` + - `Try to update PostCSS (${this.processor.version} now).` - ) - } - if (!NOT_VISITORS[event]) { - if (typeof plugin[event] === 'object') { - for (let filter in plugin[event]) { - if (filter === '*') { - add(plugin, event, plugin[event][filter]) - } else { - add( - plugin, - event + '-' + filter.toLowerCase(), - plugin[event][filter] - ) - } - } - } else if (typeof plugin[event] === 'function') { - add(plugin, event, plugin[event]) - } - } - } - } - } - this.hasListener = Object.keys(this.listeners).length > 0 - } - - async runAsync() { - this.plugin = 0 - for (let i = 0; i < this.plugins.length; i++) { - let plugin = this.plugins[i] - let promise = this.runOnRoot(plugin) - if (isPromise(promise)) { - try { - await promise - } catch (error) { - throw this.handleError(error) - } - } - } - - this.prepareVisitors() - if (this.hasListener) { - let root = this.result.root - while (!root[isClean]) { - root[isClean] = true - let stack = [toStack(root)] - while (stack.length > 0) { - let promise = this.visitTick(stack) - if (isPromise(promise)) { - try { - await promise - } catch (e) { - let node = stack[stack.length - 1].node - throw this.handleError(e, node) - } - } - } - } - - if (this.listeners.OnceExit) { - for (let [plugin, visitor] of this.listeners.OnceExit) { - this.result.lastPlugin = plugin - try { - if (root.type === 'document') { - let roots = root.nodes.map(subRoot => - visitor(subRoot, this.helpers) - ) - - await Promise.all(roots) - } else { - await visitor(root, this.helpers) - } - } catch (e) { - throw this.handleError(e) - } - } - } - } - - this.processed = true - return this.stringify() - } - - runOnRoot(plugin) { - this.result.lastPlugin = plugin - try { - if (typeof plugin === 'object' && plugin.Once) { - if (this.result.root.type === 'document') { - let roots = this.result.root.nodes.map(root => - plugin.Once(root, this.helpers) - ) - - if (isPromise(roots[0])) { - return Promise.all(roots) - } - - return roots - } - - return plugin.Once(this.result.root, this.helpers) - } else if (typeof plugin === 'function') { - return plugin(this.result.root, this.result) - } - } catch (error) { - throw this.handleError(error) - } - } - - stringify() { - if (this.error) throw this.error - if (this.stringified) return this.result - this.stringified = true - - this.sync() - - let opts = this.result.opts - let str = stringify - if (opts.syntax) str = opts.syntax.stringify - if (opts.stringifier) str = opts.stringifier - if (str.stringify) str = str.stringify - - let map = new MapGenerator(str, this.result.root, this.result.opts) - let data = map.generate() - this.result.css = data[0] - this.result.map = data[1] - - return this.result - } - - sync() { - if (this.error) throw this.error - if (this.processed) return this.result - this.processed = true - - if (this.processing) { - throw this.getAsyncError() - } - - for (let plugin of this.plugins) { - let promise = this.runOnRoot(plugin) - if (isPromise(promise)) { - throw this.getAsyncError() - } - } - - this.prepareVisitors() - if (this.hasListener) { - let root = this.result.root - while (!root[isClean]) { - root[isClean] = true - this.walkSync(root) - } - if (this.listeners.OnceExit) { - if (root.type === 'document') { - for (let subRoot of root.nodes) { - this.visitSync(this.listeners.OnceExit, subRoot) - } - } else { - this.visitSync(this.listeners.OnceExit, root) - } - } - } - - return this.result - } - - then(onFulfilled, onRejected) { - if (process.env.NODE_ENV !== 'production') { - if (!('from' in this.opts)) { - warnOnce( - 
'Without `from` option PostCSS could generate wrong source map ' + - 'and will not find Browserslist config. Set it to CSS file path ' + - 'or to `undefined` to prevent this warning.' - ) - } - } - return this.async().then(onFulfilled, onRejected) - } - - toString() { - return this.css - } - - visitSync(visitors, node) { - for (let [plugin, visitor] of visitors) { - this.result.lastPlugin = plugin - let promise - try { - promise = visitor(node, this.helpers) - } catch (e) { - throw this.handleError(e, node.proxyOf) - } - if (node.type !== 'root' && node.type !== 'document' && !node.parent) { - return true - } - if (isPromise(promise)) { - throw this.getAsyncError() - } - } - } - - visitTick(stack) { - let visit = stack[stack.length - 1] - let { node, visitors } = visit - - if (node.type !== 'root' && node.type !== 'document' && !node.parent) { - stack.pop() - return - } - - if (visitors.length > 0 && visit.visitorIndex < visitors.length) { - let [plugin, visitor] = visitors[visit.visitorIndex] - visit.visitorIndex += 1 - if (visit.visitorIndex === visitors.length) { - visit.visitors = [] - visit.visitorIndex = 0 - } - this.result.lastPlugin = plugin - try { - return visitor(node.toProxy(), this.helpers) - } catch (e) { - throw this.handleError(e, node) - } - } - - if (visit.iterator !== 0) { - let iterator = visit.iterator - let child - while ((child = node.nodes[node.indexes[iterator]])) { - node.indexes[iterator] += 1 - if (!child[isClean]) { - child[isClean] = true - stack.push(toStack(child)) - return - } - } - visit.iterator = 0 - delete node.indexes[iterator] - } - - let events = visit.events - while (visit.eventIndex < events.length) { - let event = events[visit.eventIndex] - visit.eventIndex += 1 - if (event === CHILDREN) { - if (node.nodes && node.nodes.length) { - node[isClean] = true - visit.iterator = node.getIterator() - } - return - } else if (this.listeners[event]) { - visit.visitors = this.listeners[event] - return - } - } - stack.pop() - } - - walkSync(node) { - node[isClean] = true - let events = getEvents(node) - for (let event of events) { - if (event === CHILDREN) { - if (node.nodes) { - node.each(child => { - if (!child[isClean]) this.walkSync(child) - }) - } - } else { - let visitors = this.listeners[event] - if (visitors) { - if (this.visitSync(visitors, node.toProxy())) return - } - } - } - } - - warnings() { - return this.sync().warnings() - } - - get content() { - return this.stringify().content - } - - get css() { - return this.stringify().css - } - - get map() { - return this.stringify().map - } - - get messages() { - return this.sync().messages - } - - get opts() { - return this.result.opts - } - - get processor() { - return this.result.processor - } - - get root() { - return this.sync().root - } - - get [Symbol.toStringTag]() { - return 'LazyResult' - } -} - -LazyResult.registerPostcss = dependant => { - postcss = dependant -} - -module.exports = LazyResult -LazyResult.default = LazyResult - -Root.registerLazyResult(LazyResult) -Document.registerLazyResult(LazyResult) diff --git a/node_modules/postcss/lib/list.d.ts b/node_modules/postcss/lib/list.d.ts deleted file mode 100644 index e262ad3..0000000 --- a/node_modules/postcss/lib/list.d.ts +++ /dev/null @@ -1,60 +0,0 @@ -declare namespace list { - type List = { - /** - * Safely splits comma-separated values (such as those for `transition-*` - * and `background` properties). 
- * - * ```js - * Once (root, { list }) { - * list.comma('black, linear-gradient(white, black)') - * //=> ['black', 'linear-gradient(white, black)'] - * } - * ``` - * - * @param str Comma-separated values. - * @return Split values. - */ - comma(str: string): string[] - - default: List - - /** - * Safely splits space-separated values (such as those for `background`, - * `border-radius`, and other shorthand properties). - * - * ```js - * Once (root, { list }) { - * list.space('1px calc(10% + 1px)') //=> ['1px', 'calc(10% + 1px)'] - * } - * ``` - * - * @param str Space-separated values. - * @return Split values. - */ - space(str: string): string[] - - /** - * Safely splits values. - * - * ```js - * Once (root, { list }) { - * list.split('1px calc(10% + 1px)', [' ', '\n', '\t']) //=> ['1px', 'calc(10% + 1px)'] - * } - * ``` - * - * @param string separated values. - * @param separators array of separators. - * @param last boolean indicator. - * @return Split values. - */ - split( - string: string, - separators: readonly string[], - last: boolean - ): string[] - } -} - -declare const list: list.List - -export = list diff --git a/node_modules/postcss/lib/list.js b/node_modules/postcss/lib/list.js deleted file mode 100644 index 1b31f98..0000000 --- a/node_modules/postcss/lib/list.js +++ /dev/null @@ -1,58 +0,0 @@ -'use strict' - -let list = { - comma(string) { - return list.split(string, [','], true) - }, - - space(string) { - let spaces = [' ', '\n', '\t'] - return list.split(string, spaces) - }, - - split(string, separators, last) { - let array = [] - let current = '' - let split = false - - let func = 0 - let inQuote = false - let prevQuote = '' - let escape = false - - for (let letter of string) { - if (escape) { - escape = false - } else if (letter === '\\') { - escape = true - } else if (inQuote) { - if (letter === prevQuote) { - inQuote = false - } - } else if (letter === '"' || letter === "'") { - inQuote = true - prevQuote = letter - } else if (letter === '(') { - func += 1 - } else if (letter === ')') { - if (func > 0) func -= 1 - } else if (func === 0) { - if (separators.includes(letter)) split = true - } - - if (split) { - if (current !== '') array.push(current.trim()) - current = '' - split = false - } else { - current += letter - } - } - - if (last || current !== '') array.push(current.trim()) - return array - } -} - -module.exports = list -list.default = list diff --git a/node_modules/postcss/lib/map-generator.js b/node_modules/postcss/lib/map-generator.js deleted file mode 100644 index 89069d3..0000000 --- a/node_modules/postcss/lib/map-generator.js +++ /dev/null @@ -1,368 +0,0 @@ -'use strict' - -let { dirname, relative, resolve, sep } = require('path') -let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') -let { pathToFileURL } = require('url') - -let Input = require('./input') - -let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) -let pathAvailable = Boolean(dirname && resolve && relative && sep) - -class MapGenerator { - constructor(stringify, root, opts, cssString) { - this.stringify = stringify - this.mapOpts = opts.map || {} - this.root = root - this.opts = opts - this.css = cssString - this.originalCSS = cssString - this.usesFileUrls = !this.mapOpts.from && this.mapOpts.absolute - - this.memoizedFileURLs = new Map() - this.memoizedPaths = new Map() - this.memoizedURLs = new Map() - } - - addAnnotation() { - let content - - if (this.isInline()) { - content = - 'data:application/json;base64,' + this.toBase64(this.map.toString()) - } 
else if (typeof this.mapOpts.annotation === 'string') { - content = this.mapOpts.annotation - } else if (typeof this.mapOpts.annotation === 'function') { - content = this.mapOpts.annotation(this.opts.to, this.root) - } else { - content = this.outputFile() + '.map' - } - let eol = '\n' - if (this.css.includes('\r\n')) eol = '\r\n' - - this.css += eol + '/*# sourceMappingURL=' + content + ' */' - } - - applyPrevMaps() { - for (let prev of this.previous()) { - let from = this.toUrl(this.path(prev.file)) - let root = prev.root || dirname(prev.file) - let map - - if (this.mapOpts.sourcesContent === false) { - map = new SourceMapConsumer(prev.text) - if (map.sourcesContent) { - map.sourcesContent = null - } - } else { - map = prev.consumer() - } - - this.map.applySourceMap(map, from, this.toUrl(this.path(root))) - } - } - - clearAnnotation() { - if (this.mapOpts.annotation === false) return - - if (this.root) { - let node - for (let i = this.root.nodes.length - 1; i >= 0; i--) { - node = this.root.nodes[i] - if (node.type !== 'comment') continue - if (node.text.startsWith('# sourceMappingURL=')) { - this.root.removeChild(i) - } - } - } else if (this.css) { - this.css = this.css.replace(/\n*\/\*#[\S\s]*?\*\/$/gm, '') - } - } - - generate() { - this.clearAnnotation() - if (pathAvailable && sourceMapAvailable && this.isMap()) { - return this.generateMap() - } else { - let result = '' - this.stringify(this.root, i => { - result += i - }) - return [result] - } - } - - generateMap() { - if (this.root) { - this.generateString() - } else if (this.previous().length === 1) { - let prev = this.previous()[0].consumer() - prev.file = this.outputFile() - this.map = SourceMapGenerator.fromSourceMap(prev, { - ignoreInvalidMapping: true - }) - } else { - this.map = new SourceMapGenerator({ - file: this.outputFile(), - ignoreInvalidMapping: true - }) - this.map.addMapping({ - generated: { column: 0, line: 1 }, - original: { column: 0, line: 1 }, - source: this.opts.from - ? 
this.toUrl(this.path(this.opts.from)) - : '' - }) - } - - if (this.isSourcesContent()) this.setSourcesContent() - if (this.root && this.previous().length > 0) this.applyPrevMaps() - if (this.isAnnotation()) this.addAnnotation() - - if (this.isInline()) { - return [this.css] - } else { - return [this.css, this.map] - } - } - - generateString() { - this.css = '' - this.map = new SourceMapGenerator({ - file: this.outputFile(), - ignoreInvalidMapping: true - }) - - let line = 1 - let column = 1 - - let noSource = '' - let mapping = { - generated: { column: 0, line: 0 }, - original: { column: 0, line: 0 }, - source: '' - } - - let last, lines - this.stringify(this.root, (str, node, type) => { - this.css += str - - if (node && type !== 'end') { - mapping.generated.line = line - mapping.generated.column = column - 1 - if (node.source && node.source.start) { - mapping.source = this.sourcePath(node) - mapping.original.line = node.source.start.line - mapping.original.column = node.source.start.column - 1 - this.map.addMapping(mapping) - } else { - mapping.source = noSource - mapping.original.line = 1 - mapping.original.column = 0 - this.map.addMapping(mapping) - } - } - - lines = str.match(/\n/g) - if (lines) { - line += lines.length - last = str.lastIndexOf('\n') - column = str.length - last - } else { - column += str.length - } - - if (node && type !== 'start') { - let p = node.parent || { raws: {} } - let childless = - node.type === 'decl' || (node.type === 'atrule' && !node.nodes) - if (!childless || node !== p.last || p.raws.semicolon) { - if (node.source && node.source.end) { - mapping.source = this.sourcePath(node) - mapping.original.line = node.source.end.line - mapping.original.column = node.source.end.column - 1 - mapping.generated.line = line - mapping.generated.column = column - 2 - this.map.addMapping(mapping) - } else { - mapping.source = noSource - mapping.original.line = 1 - mapping.original.column = 0 - mapping.generated.line = line - mapping.generated.column = column - 1 - this.map.addMapping(mapping) - } - } - } - }) - } - - isAnnotation() { - if (this.isInline()) { - return true - } - if (typeof this.mapOpts.annotation !== 'undefined') { - return this.mapOpts.annotation - } - if (this.previous().length) { - return this.previous().some(i => i.annotation) - } - return true - } - - isInline() { - if (typeof this.mapOpts.inline !== 'undefined') { - return this.mapOpts.inline - } - - let annotation = this.mapOpts.annotation - if (typeof annotation !== 'undefined' && annotation !== true) { - return false - } - - if (this.previous().length) { - return this.previous().some(i => i.inline) - } - return true - } - - isMap() { - if (typeof this.opts.map !== 'undefined') { - return !!this.opts.map - } - return this.previous().length > 0 - } - - isSourcesContent() { - if (typeof this.mapOpts.sourcesContent !== 'undefined') { - return this.mapOpts.sourcesContent - } - if (this.previous().length) { - return this.previous().some(i => i.withContent()) - } - return true - } - - outputFile() { - if (this.opts.to) { - return this.path(this.opts.to) - } else if (this.opts.from) { - return this.path(this.opts.from) - } else { - return 'to.css' - } - } - - path(file) { - if (this.mapOpts.absolute) return file - if (file.charCodeAt(0) === 60 /* `<` */) return file - if (/^\w+:\/\//.test(file)) return file - let cached = this.memoizedPaths.get(file) - if (cached) return cached - - let from = this.opts.to ? dirname(this.opts.to) : '.' 
- - if (typeof this.mapOpts.annotation === 'string') { - from = dirname(resolve(from, this.mapOpts.annotation)) - } - - let path = relative(from, file) - this.memoizedPaths.set(file, path) - - return path - } - - previous() { - if (!this.previousMaps) { - this.previousMaps = [] - if (this.root) { - this.root.walk(node => { - if (node.source && node.source.input.map) { - let map = node.source.input.map - if (!this.previousMaps.includes(map)) { - this.previousMaps.push(map) - } - } - }) - } else { - let input = new Input(this.originalCSS, this.opts) - if (input.map) this.previousMaps.push(input.map) - } - } - - return this.previousMaps - } - - setSourcesContent() { - let already = {} - if (this.root) { - this.root.walk(node => { - if (node.source) { - let from = node.source.input.from - if (from && !already[from]) { - already[from] = true - let fromUrl = this.usesFileUrls - ? this.toFileUrl(from) - : this.toUrl(this.path(from)) - this.map.setSourceContent(fromUrl, node.source.input.css) - } - } - }) - } else if (this.css) { - let from = this.opts.from - ? this.toUrl(this.path(this.opts.from)) - : '' - this.map.setSourceContent(from, this.css) - } - } - - sourcePath(node) { - if (this.mapOpts.from) { - return this.toUrl(this.mapOpts.from) - } else if (this.usesFileUrls) { - return this.toFileUrl(node.source.input.from) - } else { - return this.toUrl(this.path(node.source.input.from)) - } - } - - toBase64(str) { - if (Buffer) { - return Buffer.from(str).toString('base64') - } else { - return window.btoa(unescape(encodeURIComponent(str))) - } - } - - toFileUrl(path) { - let cached = this.memoizedFileURLs.get(path) - if (cached) return cached - - if (pathToFileURL) { - let fileURL = pathToFileURL(path).toString() - this.memoizedFileURLs.set(path, fileURL) - - return fileURL - } else { - throw new Error( - '`map.absolute` option is not available in this PostCSS build' - ) - } - } - - toUrl(path) { - let cached = this.memoizedURLs.get(path) - if (cached) return cached - - if (sep === '\\') { - path = path.replace(/\\/g, '/') - } - - let url = encodeURI(path).replace(/[#?]/g, encodeURIComponent) - this.memoizedURLs.set(path, url) - - return url - } -} - -module.exports = MapGenerator diff --git a/node_modules/postcss/lib/no-work-result.d.ts b/node_modules/postcss/lib/no-work-result.d.ts deleted file mode 100644 index 8039076..0000000 --- a/node_modules/postcss/lib/no-work-result.d.ts +++ /dev/null @@ -1,46 +0,0 @@ -import LazyResult from './lazy-result.js' -import { SourceMap } from './postcss.js' -import Processor from './processor.js' -import Result, { Message, ResultOptions } from './result.js' -import Root from './root.js' -import Warning from './warning.js' - -declare namespace NoWorkResult { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { NoWorkResult_ as default } -} - -/** - * A Promise proxy for the result of PostCSS transformations. - * This lazy result instance doesn't parse css unless `NoWorkResult#root` or `Result#root` - * are accessed. See the example below for details. - * A `NoWork` instance is returned by `Processor#process` ONLY when no plugins defined. - * - * ```js - * const noWorkResult = postcss().process(css) // No plugins are defined. 
- * // CSS is not parsed - * let root = noWorkResult.root // now css is parsed because we accessed the root - * ``` - */ -declare class NoWorkResult_ implements LazyResult { - catch: Promise>['catch'] - finally: Promise>['finally'] - then: Promise>['then'] - constructor(processor: Processor, css: string, opts: ResultOptions) - async(): Promise> - sync(): Result - toString(): string - warnings(): Warning[] - get content(): string - get css(): string - get map(): SourceMap - get messages(): Message[] - get opts(): ResultOptions - get processor(): Processor - get root(): Root - get [Symbol.toStringTag](): string -} - -declare class NoWorkResult extends NoWorkResult_ {} - -export = NoWorkResult diff --git a/node_modules/postcss/lib/no-work-result.js b/node_modules/postcss/lib/no-work-result.js deleted file mode 100644 index a84d843..0000000 --- a/node_modules/postcss/lib/no-work-result.js +++ /dev/null @@ -1,138 +0,0 @@ -'use strict' - -let MapGenerator = require('./map-generator') -let parse = require('./parse') -const Result = require('./result') -let stringify = require('./stringify') -let warnOnce = require('./warn-once') - -class NoWorkResult { - constructor(processor, css, opts) { - css = css.toString() - this.stringified = false - - this._processor = processor - this._css = css - this._opts = opts - this._map = undefined - let root - - let str = stringify - this.result = new Result(this._processor, root, this._opts) - this.result.css = css - - let self = this - Object.defineProperty(this.result, 'root', { - get() { - return self.root - } - }) - - let map = new MapGenerator(str, root, this._opts, css) - if (map.isMap()) { - let [generatedCSS, generatedMap] = map.generate() - if (generatedCSS) { - this.result.css = generatedCSS - } - if (generatedMap) { - this.result.map = generatedMap - } - } else { - map.clearAnnotation() - this.result.css = map.css - } - } - - async() { - if (this.error) return Promise.reject(this.error) - return Promise.resolve(this.result) - } - - catch(onRejected) { - return this.async().catch(onRejected) - } - - finally(onFinally) { - return this.async().then(onFinally, onFinally) - } - - sync() { - if (this.error) throw this.error - return this.result - } - - then(onFulfilled, onRejected) { - if (process.env.NODE_ENV !== 'production') { - if (!('from' in this._opts)) { - warnOnce( - 'Without `from` option PostCSS could generate wrong source map ' + - 'and will not find Browserslist config. Set it to CSS file path ' + - 'or to `undefined` to prevent this warning.' 
- ) - } - } - - return this.async().then(onFulfilled, onRejected) - } - - toString() { - return this._css - } - - warnings() { - return [] - } - - get content() { - return this.result.css - } - - get css() { - return this.result.css - } - - get map() { - return this.result.map - } - - get messages() { - return [] - } - - get opts() { - return this.result.opts - } - - get processor() { - return this.result.processor - } - - get root() { - if (this._root) { - return this._root - } - - let root - let parser = parse - - try { - root = parser(this._css, this._opts) - } catch (error) { - this.error = error - } - - if (this.error) { - throw this.error - } else { - this._root = root - return root - } - } - - get [Symbol.toStringTag]() { - return 'NoWorkResult' - } -} - -module.exports = NoWorkResult -NoWorkResult.default = NoWorkResult diff --git a/node_modules/postcss/lib/node.d.ts b/node_modules/postcss/lib/node.d.ts deleted file mode 100644 index 3597670..0000000 --- a/node_modules/postcss/lib/node.d.ts +++ /dev/null @@ -1,541 +0,0 @@ -import AtRule = require('./at-rule.js') - -import { AtRuleProps } from './at-rule.js' -import Comment, { CommentProps } from './comment.js' -import Container, { NewChild } from './container.js' -import CssSyntaxError from './css-syntax-error.js' -import Declaration, { DeclarationProps } from './declaration.js' -import Document from './document.js' -import Input from './input.js' -import { Stringifier, Syntax } from './postcss.js' -import Result from './result.js' -import Root from './root.js' -import Rule, { RuleProps } from './rule.js' -import Warning, { WarningOptions } from './warning.js' - -declare namespace Node { - export type ChildNode = AtRule.default | Comment | Declaration | Rule - - export type AnyNode = - | AtRule.default - | Comment - | Declaration - | Document - | Root - | Rule - - export type ChildProps = - | AtRuleProps - | CommentProps - | DeclarationProps - | RuleProps - - export interface Position { - /** - * Source line in file. In contrast to `offset` it starts from 1. - */ - column: number - - /** - * Source column in file. - */ - line: number - - /** - * Source offset in file. It starts from 0. - */ - offset: number - } - - export interface Range { - /** - * End position, exclusive. - */ - end: Position - - /** - * Start position, inclusive. - */ - start: Position - } - - /** - * Source represents an interface for the {@link Node.source} property. - */ - export interface Source { - /** - * The inclusive ending position for the source - * code of a node. - */ - end?: Position - - /** - * The source file from where a node has originated. - */ - input: Input - - /** - * The inclusive starting position for the source - * code of a node. - */ - start?: Position - } - - /** - * Interface represents an interface for an object received - * as parameter by Node class constructor. - */ - export interface NodeProps { - source?: Source - } - - export interface NodeErrorOptions { - /** - * An ending index inside a node's string that should be highlighted as - * source of error. - */ - endIndex?: number - /** - * An index inside a node's string that should be highlighted as source - * of error. - */ - index?: number - /** - * Plugin name that created this error. PostCSS will set it automatically. - */ - plugin?: string - /** - * A word inside a node's string, that should be highlighted as source - * of error. 
- */ - word?: string - } - - // eslint-disable-next-line @typescript-eslint/no-shadow - class Node extends Node_ {} - export { Node as default } -} - -/** - * It represents an abstract class that handles common - * methods for other CSS abstract syntax tree nodes. - * - * Any node that represents CSS selector or value should - * not extend the `Node` class. - */ -declare abstract class Node_ { - /** - * It represents parent of the current node. - * - * ```js - * root.nodes[0].parent === root //=> true - * ``` - */ - parent: Container | Document | undefined - - /** - * It represents unnecessary whitespace and characters present - * in the css source code. - * - * Information to generate byte-to-byte equal node string as it was - * in the origin input. - * - * The properties of the raws object are decided by parser, - * the default parser uses the following properties: - * - * * `before`: the space symbols before the node. It also stores `*` - * and `_` symbols before the declaration (IE hack). - * * `after`: the space symbols after the last child of the node - * to the end of the node. - * * `between`: the symbols between the property and value - * for declarations, selector and `{` for rules, or last parameter - * and `{` for at-rules. - * * `semicolon`: contains true if the last child has - * an (optional) semicolon. - * * `afterName`: the space between the at-rule name and its parameters. - * * `left`: the space symbols between `/*` and the comment’s text. - * * `right`: the space symbols between the comment’s text - * and */. - * - `important`: the content of the important statement, - * if it is not just `!important`. - * - * PostCSS filters out the comments inside selectors, declaration values - * and at-rule parameters but it stores the origin content in raws. - * - * ```js - * const root = postcss.parse('a {\n color:black\n}') - * root.first.first.raws //=> { before: '\n ', between: ':' } - * ``` - */ - raws: any - - /** - * It represents information related to origin of a node and is required - * for generating source maps. - * - * The nodes that are created manually using the public APIs - * provided by PostCSS will have `source` undefined and - * will be absent in the source map. - * - * For this reason, the plugin developer should consider - * duplicating nodes as the duplicate node will have the - * same source as the original node by default or assign - * source to a node created manually. - * - * ```js - * decl.source.input.from //=> '/home/ai/source.css' - * decl.source.start //=> { line: 10, column: 2 } - * decl.source.end //=> { line: 10, column: 12 } - * ``` - * - * ```js - * // Incorrect method, source not specified! - * const prefixed = postcss.decl({ - * prop: '-moz-' + decl.prop, - * value: decl.value - * }) - * - * // Correct method, source is inherited when duplicating. - * const prefixed = decl.clone({ - * prop: '-moz-' + decl.prop - * }) - * ``` - * - * ```js - * if (atrule.name === 'add-link') { - * const rule = postcss.rule({ - * selector: 'a', - * source: atrule.source - * }) - * - * atrule.parent.insertBefore(atrule, rule) - * } - * ``` - */ - source?: Node.Source - - /** - * It represents type of a node in - * an abstract syntax tree. - * - * A type of node helps in identification of a node - * and perform operation based on it's type. 
- * - * ```js - * const declaration = new Declaration({ - * prop: 'color', - * value: 'black' - * }) - * - * declaration.type //=> 'decl' - * ``` - */ - type: string - - constructor(defaults?: object) - - /** - * If this node isn't already dirty, marks it and its ancestors as such. This - * indicates to the LazyResult processor that the {@link Root} has been - * modified by the current plugin and may need to be processed again by other - * plugins. - */ - protected markDirty(): void - - /** - * Insert new node after current node to current node’s parent. - * - * Just alias for `node.parent.insertAfter(node, add)`. - * - * ```js - * decl.after('color: black') - * ``` - * - * @param newNode New node. - * @return This node for methods chain. - */ - after( - newNode: Node | Node.ChildProps | readonly Node[] | string | undefined - ): this - - /** - * It assigns properties to an existing node instance. - * - * ```js - * decl.assign({ prop: 'word-wrap', value: 'break-word' }) - * ``` - * - * @param overrides New properties to override the node. - * - * @return `this` for method chaining. - */ - assign(overrides: object): this - - /** - * Insert new node before current node to current node’s parent. - * - * Just alias for `node.parent.insertBefore(node, add)`. - * - * ```js - * decl.before('content: ""') - * ``` - * - * @param newNode New node. - * @return This node for methods chain. - */ - before( - newNode: Node | Node.ChildProps | readonly Node[] | string | undefined - ): this - - /** - * Clear the code style properties for the node and its children. - * - * ```js - * node.raws.before //=> ' ' - * node.cleanRaws() - * node.raws.before //=> undefined - * ``` - * - * @param keepBetween Keep the `raws.between` symbols. - */ - cleanRaws(keepBetween?: boolean): void - - /** - * It creates clone of an existing node, which includes all the properties - * and their values, that includes `raws` but not `type`. - * - * ```js - * decl.raws.before //=> "\n " - * const cloned = decl.clone({ prop: '-moz-' + decl.prop }) - * cloned.raws.before //=> "\n " - * cloned.toString() //=> -moz-transform: scale(0) - * ``` - * - * @param overrides New properties to override in the clone. - * - * @return Duplicate of the node instance. - */ - clone(overrides?: object): this - - /** - * Shortcut to clone the node and insert the resulting cloned node - * after the current node. - * - * @param overrides New properties to override in the clone. - * @return New node. - */ - cloneAfter(overrides?: object): this - - /** - * Shortcut to clone the node and insert the resulting cloned node - * before the current node. - * - * ```js - * decl.cloneBefore({ prop: '-moz-' + decl.prop }) - * ``` - * - * @param overrides Mew properties to override in the clone. - * - * @return New node - */ - cloneBefore(overrides?: object): this - - /** - * It creates an instance of the class `CssSyntaxError` and parameters passed - * to this method are assigned to the error instance. - * - * The error instance will have description for the - * error, original position of the node in the - * source, showing line and column number. - * - * If any previous map is present, it would be used - * to get original position of the source. - * - * The Previous Map here is referred to the source map - * generated by previous compilation, example: Less, - * Stylus and Sass. - * - * This method returns the error instance instead of - * throwing it. 
- * - * ```js - * if (!variables[name]) { - * throw decl.error(`Unknown variable ${name}`, { word: name }) - * // CssSyntaxError: postcss-vars:a.sass:4:3: Unknown variable $black - * // color: $black - * // a - * // ^ - * // background: white - * } - * ``` - * - * @param message Description for the error instance. - * @param options Options for the error instance. - * - * @return Error instance is returned. - */ - error(message: string, options?: Node.NodeErrorOptions): CssSyntaxError - - /** - * Returns the next child of the node’s parent. - * Returns `undefined` if the current node is the last child. - * - * ```js - * if (comment.text === 'delete next') { - * const next = comment.next() - * if (next) { - * next.remove() - * } - * } - * ``` - * - * @return Next node. - */ - next(): Node.ChildNode | undefined - - /** - * Get the position for a word or an index inside the node. - * - * @param opts Options. - * @return Position. - */ - positionBy(opts?: Pick): Node.Position - - /** - * Convert string index to line/column. - * - * @param index The symbol number in the node’s string. - * @return Symbol position in file. - */ - positionInside(index: number): Node.Position - - /** - * Returns the previous child of the node’s parent. - * Returns `undefined` if the current node is the first child. - * - * ```js - * const annotation = decl.prev() - * if (annotation.type === 'comment') { - * readAnnotation(annotation.text) - * } - * ``` - * - * @return Previous node. - */ - prev(): Node.ChildNode | undefined - - /** - * Get the range for a word or start and end index inside the node. - * The start index is inclusive; the end index is exclusive. - * - * @param opts Options. - * @return Range. - */ - rangeBy( - opts?: Pick - ): Node.Range - - /** - * Returns a `raws` value. If the node is missing - * the code style property (because the node was manually built or cloned), - * PostCSS will try to autodetect the code style property by looking - * at other nodes in the tree. - * - * ```js - * const root = postcss.parse('a { background: white }') - * root.nodes[0].append({ prop: 'color', value: 'black' }) - * root.nodes[0].nodes[1].raws.before //=> undefined - * root.nodes[0].nodes[1].raw('before') //=> ' ' - * ``` - * - * @param prop Name of code style property. - * @param defaultType Name of default value, it can be missed - * if the value is the same as prop. - * @return {string} Code style value. - */ - raw(prop: string, defaultType?: string): string - - /** - * It removes the node from its parent and deletes its parent property. - * - * ```js - * if (decl.prop.match(/^-webkit-/)) { - * decl.remove() - * } - * ``` - * - * @return `this` for method chaining. - */ - remove(): this - - /** - * Inserts node(s) before the current node and removes the current node. - * - * ```js - * AtRule: { - * mixin: atrule => { - * atrule.replaceWith(mixinRules[atrule.params]) - * } - * } - * ``` - * - * @param nodes Mode(s) to replace current one. - * @return Current node to methods chain. - */ - replaceWith(...nodes: NewChild[]): this - - /** - * Finds the Root instance of the node’s tree. - * - * ```js - * root.nodes[0].nodes[0].root() === root - * ``` - * - * @return Root parent. - */ - root(): Root - - /** - * Fix circular links on `JSON.stringify()`. - * - * @return Cleaned object. - */ - toJSON(): object - - /** - * It compiles the node to browser readable cascading style sheets string - * depending on it's type. 
- * - * ```js - * new Rule({ selector: 'a' }).toString() //=> "a {}" - * ``` - * - * @param stringifier A syntax to use in string generation. - * @return CSS string of this node. - */ - toString(stringifier?: Stringifier | Syntax): string - - /** - * It is a wrapper for {@link Result#warn}, providing convenient - * way of generating warnings. - * - * ```js - * Declaration: { - * bad: (decl, { result }) => { - * decl.warn(result, 'Deprecated property: bad') - * } - * } - * ``` - * - * @param result The `Result` instance that will receive the warning. - * @param message Description for the warning. - * @param options Options for the warning. - * - * @return `Warning` instance is returned - */ - warn(result: Result, message: string, options?: WarningOptions): Warning -} - -declare class Node extends Node_ {} - -export = Node diff --git a/node_modules/postcss/lib/node.js b/node_modules/postcss/lib/node.js deleted file mode 100644 index 9949be7..0000000 --- a/node_modules/postcss/lib/node.js +++ /dev/null @@ -1,425 +0,0 @@ -'use strict' - -let CssSyntaxError = require('./css-syntax-error') -let Stringifier = require('./stringifier') -let stringify = require('./stringify') -let { isClean, my } = require('./symbols') - -function cloneNode(obj, parent) { - let cloned = new obj.constructor() - - for (let i in obj) { - if (!Object.prototype.hasOwnProperty.call(obj, i)) { - /* c8 ignore next 2 */ - continue - } - if (i === 'proxyCache') continue - let value = obj[i] - let type = typeof value - - if (i === 'parent' && type === 'object') { - if (parent) cloned[i] = parent - } else if (i === 'source') { - cloned[i] = value - } else if (Array.isArray(value)) { - cloned[i] = value.map(j => cloneNode(j, cloned)) - } else { - if (type === 'object' && value !== null) value = cloneNode(value) - cloned[i] = value - } - } - - return cloned -} - -function sourceOffset(inputCSS, position) { - // Not all custom syntaxes support `offset` in `source.start` and `source.end` - if ( - position && - typeof position.offset !== 'undefined' - ) { - return position.offset; - } - - let column = 1 - let line = 1 - let offset = 0 - - for (let i = 0; i < inputCSS.length; i++) { - if (line === position.line && column === position.column) { - offset = i - break - } - - if (inputCSS[i] === '\n') { - column = 1 - line += 1 - } else { - column += 1 - } - } - - return offset -} - -class Node { - constructor(defaults = {}) { - this.raws = {} - this[isClean] = false - this[my] = true - - for (let name in defaults) { - if (name === 'nodes') { - this.nodes = [] - for (let node of defaults[name]) { - if (typeof node.clone === 'function') { - this.append(node.clone()) - } else { - this.append(node) - } - } - } else { - this[name] = defaults[name] - } - } - } - - addToError(error) { - error.postcssNode = this - if (error.stack && this.source && /\n\s{4}at /.test(error.stack)) { - let s = this.source - error.stack = error.stack.replace( - /\n\s{4}at /, - `$&${s.input.from}:${s.start.line}:${s.start.column}$&` - ) - } - return error - } - - after(add) { - this.parent.insertAfter(this, add) - return this - } - - assign(overrides = {}) { - for (let name in overrides) { - this[name] = overrides[name] - } - return this - } - - before(add) { - this.parent.insertBefore(this, add) - return this - } - - cleanRaws(keepBetween) { - delete this.raws.before - delete this.raws.after - if (!keepBetween) delete this.raws.between - } - - clone(overrides = {}) { - let cloned = cloneNode(this) - for (let name in overrides) { - cloned[name] = overrides[name] - 
} - return cloned - } - - cloneAfter(overrides = {}) { - let cloned = this.clone(overrides) - this.parent.insertAfter(this, cloned) - return cloned - } - - cloneBefore(overrides = {}) { - let cloned = this.clone(overrides) - this.parent.insertBefore(this, cloned) - return cloned - } - - error(message, opts = {}) { - if (this.source) { - let { end, start } = this.rangeBy(opts) - return this.source.input.error( - message, - { column: start.column, line: start.line }, - { column: end.column, line: end.line }, - opts - ) - } - return new CssSyntaxError(message) - } - - getProxyProcessor() { - return { - get(node, prop) { - if (prop === 'proxyOf') { - return node - } else if (prop === 'root') { - return () => node.root().toProxy() - } else { - return node[prop] - } - }, - - set(node, prop, value) { - if (node[prop] === value) return true - node[prop] = value - if ( - prop === 'prop' || - prop === 'value' || - prop === 'name' || - prop === 'params' || - prop === 'important' || - /* c8 ignore next */ - prop === 'text' - ) { - node.markDirty() - } - return true - } - } - } - - /* c8 ignore next 3 */ - markClean() { - this[isClean] = true - } - - markDirty() { - if (this[isClean]) { - this[isClean] = false - let next = this - while ((next = next.parent)) { - next[isClean] = false - } - } - } - - next() { - if (!this.parent) return undefined - let index = this.parent.index(this) - return this.parent.nodes[index + 1] - } - - positionBy(opts) { - let pos = this.source.start - if (opts.index) { - pos = this.positionInside(opts.index) - } else if (opts.word) { - let stringRepresentation = this.source.input.css.slice( - sourceOffset(this.source.input.css, this.source.start), - sourceOffset(this.source.input.css, this.source.end) - ) - let index = stringRepresentation.indexOf(opts.word) - if (index !== -1) pos = this.positionInside(index) - } - return pos - } - - positionInside(index) { - let column = this.source.start.column - let line = this.source.start.line - let offset = sourceOffset(this.source.input.css, this.source.start) - let end = offset + index - - for (let i = offset; i < end; i++) { - if (this.source.input.css[i] === '\n') { - column = 1 - line += 1 - } else { - column += 1 - } - } - - return { column, line } - } - - prev() { - if (!this.parent) return undefined - let index = this.parent.index(this) - return this.parent.nodes[index - 1] - } - - rangeBy(opts) { - let start = { - column: this.source.start.column, - line: this.source.start.line - } - let end = this.source.end - ? 
{ - column: this.source.end.column + 1, - line: this.source.end.line - } - : { - column: start.column + 1, - line: start.line - } - - if (opts.word) { - let stringRepresentation = this.source.input.css.slice( - sourceOffset(this.source.input.css, this.source.start), - sourceOffset(this.source.input.css, this.source.end) - ) - let index = stringRepresentation.indexOf(opts.word) - if (index !== -1) { - start = this.positionInside(index) - end = this.positionInside( - index + opts.word.length, - ) - } - } else { - if (opts.start) { - start = { - column: opts.start.column, - line: opts.start.line - } - } else if (opts.index) { - start = this.positionInside(opts.index) - } - - if (opts.end) { - end = { - column: opts.end.column, - line: opts.end.line - } - } else if (typeof opts.endIndex === 'number') { - end = this.positionInside(opts.endIndex) - } else if (opts.index) { - end = this.positionInside(opts.index + 1) - } - } - - if ( - end.line < start.line || - (end.line === start.line && end.column <= start.column) - ) { - end = { column: start.column + 1, line: start.line } - } - - return { end, start } - } - - raw(prop, defaultType) { - let str = new Stringifier() - return str.raw(this, prop, defaultType) - } - - remove() { - if (this.parent) { - this.parent.removeChild(this) - } - this.parent = undefined - return this - } - - replaceWith(...nodes) { - if (this.parent) { - let bookmark = this - let foundSelf = false - for (let node of nodes) { - if (node === this) { - foundSelf = true - } else if (foundSelf) { - this.parent.insertAfter(bookmark, node) - bookmark = node - } else { - this.parent.insertBefore(bookmark, node) - } - } - - if (!foundSelf) { - this.remove() - } - } - - return this - } - - root() { - let result = this - while (result.parent && result.parent.type !== 'document') { - result = result.parent - } - return result - } - - toJSON(_, inputs) { - let fixed = {} - let emitInputs = inputs == null - inputs = inputs || new Map() - let inputsNextIndex = 0 - - for (let name in this) { - if (!Object.prototype.hasOwnProperty.call(this, name)) { - /* c8 ignore next 2 */ - continue - } - if (name === 'parent' || name === 'proxyCache') continue - let value = this[name] - - if (Array.isArray(value)) { - fixed[name] = value.map(i => { - if (typeof i === 'object' && i.toJSON) { - return i.toJSON(null, inputs) - } else { - return i - } - }) - } else if (typeof value === 'object' && value.toJSON) { - fixed[name] = value.toJSON(null, inputs) - } else if (name === 'source') { - let inputId = inputs.get(value.input) - if (inputId == null) { - inputId = inputsNextIndex - inputs.set(value.input, inputsNextIndex) - inputsNextIndex++ - } - fixed[name] = { - end: value.end, - inputId, - start: value.start - } - } else { - fixed[name] = value - } - } - - if (emitInputs) { - fixed.inputs = [...inputs.keys()].map(input => input.toJSON()) - } - - return fixed - } - - toProxy() { - if (!this.proxyCache) { - this.proxyCache = new Proxy(this, this.getProxyProcessor()) - } - return this.proxyCache - } - - toString(stringifier = stringify) { - if (stringifier.stringify) stringifier = stringifier.stringify - let result = '' - stringifier(this, i => { - result += i - }) - return result - } - - warn(result, text, opts) { - let data = { node: this } - for (let i in opts) data[i] = opts[i] - return result.warn(text, data) - } - - get proxyOf() { - return this - } -} - -module.exports = Node -Node.default = Node diff --git a/node_modules/postcss/lib/parse.d.ts b/node_modules/postcss/lib/parse.d.ts deleted file mode 
100644 index 4c943a4..0000000 --- a/node_modules/postcss/lib/parse.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Parser } from './postcss.js' - -interface Parse extends Parser { - default: Parse -} - -declare const parse: Parse - -export = parse diff --git a/node_modules/postcss/lib/parse.js b/node_modules/postcss/lib/parse.js deleted file mode 100644 index 00a1037..0000000 --- a/node_modules/postcss/lib/parse.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -let Container = require('./container') -let Input = require('./input') -let Parser = require('./parser') - -function parse(css, opts) { - let input = new Input(css, opts) - let parser = new Parser(input) - try { - parser.parse() - } catch (e) { - if (process.env.NODE_ENV !== 'production') { - if (e.name === 'CssSyntaxError' && opts && opts.from) { - if (/\.scss$/i.test(opts.from)) { - e.message += - '\nYou tried to parse SCSS with ' + - 'the standard CSS parser; ' + - 'try again with the postcss-scss parser' - } else if (/\.sass/i.test(opts.from)) { - e.message += - '\nYou tried to parse Sass with ' + - 'the standard CSS parser; ' + - 'try again with the postcss-sass parser' - } else if (/\.less$/i.test(opts.from)) { - e.message += - '\nYou tried to parse Less with ' + - 'the standard CSS parser; ' + - 'try again with the postcss-less parser' - } - } - } - throw e - } - - return parser.root -} - -module.exports = parse -parse.default = parse - -Container.registerParse(parse) diff --git a/node_modules/postcss/lib/parser.js b/node_modules/postcss/lib/parser.js deleted file mode 100644 index 8a04411..0000000 --- a/node_modules/postcss/lib/parser.js +++ /dev/null @@ -1,609 +0,0 @@ -'use strict' - -let AtRule = require('./at-rule') -let Comment = require('./comment') -let Declaration = require('./declaration') -let Root = require('./root') -let Rule = require('./rule') -let tokenizer = require('./tokenize') - -const SAFE_COMMENT_NEIGHBOR = { - empty: true, - space: true -} - -function findLastWithPosition(tokens) { - for (let i = tokens.length - 1; i >= 0; i--) { - let token = tokens[i] - let pos = token[3] || token[2] - if (pos) return pos - } -} - -class Parser { - constructor(input) { - this.input = input - - this.root = new Root() - this.current = this.root - this.spaces = '' - this.semicolon = false - - this.createTokenizer() - this.root.source = { input, start: { column: 1, line: 1, offset: 0 } } - } - - atrule(token) { - let node = new AtRule() - node.name = token[1].slice(1) - if (node.name === '') { - this.unnamedAtrule(node, token) - } - this.init(node, token[2]) - - let type - let prev - let shift - let last = false - let open = false - let params = [] - let brackets = [] - - while (!this.tokenizer.endOfFile()) { - token = this.tokenizer.nextToken() - type = token[0] - - if (type === '(' || type === '[') { - brackets.push(type === '(' ? 
')' : ']') - } else if (type === '{' && brackets.length > 0) { - brackets.push('}') - } else if (type === brackets[brackets.length - 1]) { - brackets.pop() - } - - if (brackets.length === 0) { - if (type === ';') { - node.source.end = this.getPosition(token[2]) - node.source.end.offset++ - this.semicolon = true - break - } else if (type === '{') { - open = true - break - } else if (type === '}') { - if (params.length > 0) { - shift = params.length - 1 - prev = params[shift] - while (prev && prev[0] === 'space') { - prev = params[--shift] - } - if (prev) { - node.source.end = this.getPosition(prev[3] || prev[2]) - node.source.end.offset++ - } - } - this.end(token) - break - } else { - params.push(token) - } - } else { - params.push(token) - } - - if (this.tokenizer.endOfFile()) { - last = true - break - } - } - - node.raws.between = this.spacesAndCommentsFromEnd(params) - if (params.length) { - node.raws.afterName = this.spacesAndCommentsFromStart(params) - this.raw(node, 'params', params) - if (last) { - token = params[params.length - 1] - node.source.end = this.getPosition(token[3] || token[2]) - node.source.end.offset++ - this.spaces = node.raws.between - node.raws.between = '' - } - } else { - node.raws.afterName = '' - node.params = '' - } - - if (open) { - node.nodes = [] - this.current = node - } - } - - checkMissedSemicolon(tokens) { - let colon = this.colon(tokens) - if (colon === false) return - - let founded = 0 - let token - for (let j = colon - 1; j >= 0; j--) { - token = tokens[j] - if (token[0] !== 'space') { - founded += 1 - if (founded === 2) break - } - } - // If the token is a word, e.g. `!important`, `red` or any other valid property's value. - // Then we need to return the colon after that word token. [3] is the "end" colon of that word. - // And because we need it after that one we do +1 to get the next one. - throw this.input.error( - 'Missed semicolon', - token[0] === 'word' ? 
token[3] + 1 : token[2] - ) - } - - colon(tokens) { - let brackets = 0 - let prev, token, type - for (let [i, element] of tokens.entries()) { - token = element - type = token[0] - - if (type === '(') { - brackets += 1 - } - if (type === ')') { - brackets -= 1 - } - if (brackets === 0 && type === ':') { - if (!prev) { - this.doubleColon(token) - } else if (prev[0] === 'word' && prev[1] === 'progid') { - continue - } else { - return i - } - } - - prev = token - } - return false - } - - comment(token) { - let node = new Comment() - this.init(node, token[2]) - node.source.end = this.getPosition(token[3] || token[2]) - node.source.end.offset++ - - let text = token[1].slice(2, -2) - if (/^\s*$/.test(text)) { - node.text = '' - node.raws.left = text - node.raws.right = '' - } else { - let match = text.match(/^(\s*)([^]*\S)(\s*)$/) - node.text = match[2] - node.raws.left = match[1] - node.raws.right = match[3] - } - } - - createTokenizer() { - this.tokenizer = tokenizer(this.input) - } - - decl(tokens, customProperty) { - let node = new Declaration() - this.init(node, tokens[0][2]) - - let last = tokens[tokens.length - 1] - if (last[0] === ';') { - this.semicolon = true - tokens.pop() - } - - node.source.end = this.getPosition( - last[3] || last[2] || findLastWithPosition(tokens) - ) - node.source.end.offset++ - - while (tokens[0][0] !== 'word') { - if (tokens.length === 1) this.unknownWord(tokens) - node.raws.before += tokens.shift()[1] - } - node.source.start = this.getPosition(tokens[0][2]) - - node.prop = '' - while (tokens.length) { - let type = tokens[0][0] - if (type === ':' || type === 'space' || type === 'comment') { - break - } - node.prop += tokens.shift()[1] - } - - node.raws.between = '' - - let token - while (tokens.length) { - token = tokens.shift() - - if (token[0] === ':') { - node.raws.between += token[1] - break - } else { - if (token[0] === 'word' && /\w/.test(token[1])) { - this.unknownWord([token]) - } - node.raws.between += token[1] - } - } - - if (node.prop[0] === '_' || node.prop[0] === '*') { - node.raws.before += node.prop[0] - node.prop = node.prop.slice(1) - } - - let firstSpaces = [] - let next - while (tokens.length) { - next = tokens[0][0] - if (next !== 'space' && next !== 'comment') break - firstSpaces.push(tokens.shift()) - } - - this.precheckMissedSemicolon(tokens) - - for (let i = tokens.length - 1; i >= 0; i--) { - token = tokens[i] - if (token[1].toLowerCase() === '!important') { - node.important = true - let string = this.stringFrom(tokens, i) - string = this.spacesFromEnd(tokens) + string - if (string !== ' !important') node.raws.important = string - break - } else if (token[1].toLowerCase() === 'important') { - let cache = tokens.slice(0) - let str = '' - for (let j = i; j > 0; j--) { - let type = cache[j][0] - if (str.trim().startsWith('!') && type !== 'space') { - break - } - str = cache.pop()[1] + str - } - if (str.trim().startsWith('!')) { - node.important = true - node.raws.important = str - tokens = cache - } - } - - if (token[0] !== 'space' && token[0] !== 'comment') { - break - } - } - - let hasWord = tokens.some(i => i[0] !== 'space' && i[0] !== 'comment') - - if (hasWord) { - node.raws.between += firstSpaces.map(i => i[1]).join('') - firstSpaces = [] - } - this.raw(node, 'value', firstSpaces.concat(tokens), customProperty) - - if (node.value.includes(':') && !customProperty) { - this.checkMissedSemicolon(tokens) - } - } - - doubleColon(token) { - throw this.input.error( - 'Double colon', - { offset: token[2] }, - { offset: token[2] + 
token[1].length } - ) - } - - emptyRule(token) { - let node = new Rule() - this.init(node, token[2]) - node.selector = '' - node.raws.between = '' - this.current = node - } - - end(token) { - if (this.current.nodes && this.current.nodes.length) { - this.current.raws.semicolon = this.semicolon - } - this.semicolon = false - - this.current.raws.after = (this.current.raws.after || '') + this.spaces - this.spaces = '' - - if (this.current.parent) { - this.current.source.end = this.getPosition(token[2]) - this.current.source.end.offset++ - this.current = this.current.parent - } else { - this.unexpectedClose(token) - } - } - - endFile() { - if (this.current.parent) this.unclosedBlock() - if (this.current.nodes && this.current.nodes.length) { - this.current.raws.semicolon = this.semicolon - } - this.current.raws.after = (this.current.raws.after || '') + this.spaces - this.root.source.end = this.getPosition(this.tokenizer.position()) - } - - freeSemicolon(token) { - this.spaces += token[1] - if (this.current.nodes) { - let prev = this.current.nodes[this.current.nodes.length - 1] - if (prev && prev.type === 'rule' && !prev.raws.ownSemicolon) { - prev.raws.ownSemicolon = this.spaces - this.spaces = '' - } - } - } - - // Helpers - - getPosition(offset) { - let pos = this.input.fromOffset(offset) - return { - column: pos.col, - line: pos.line, - offset - } - } - - init(node, offset) { - this.current.push(node) - node.source = { - input: this.input, - start: this.getPosition(offset) - } - node.raws.before = this.spaces - this.spaces = '' - if (node.type !== 'comment') this.semicolon = false - } - - other(start) { - let end = false - let type = null - let colon = false - let bracket = null - let brackets = [] - let customProperty = start[1].startsWith('--') - - let tokens = [] - let token = start - while (token) { - type = token[0] - tokens.push(token) - - if (type === '(' || type === '[') { - if (!bracket) bracket = token - brackets.push(type === '(' ? 
')' : ']') - } else if (customProperty && colon && type === '{') { - if (!bracket) bracket = token - brackets.push('}') - } else if (brackets.length === 0) { - if (type === ';') { - if (colon) { - this.decl(tokens, customProperty) - return - } else { - break - } - } else if (type === '{') { - this.rule(tokens) - return - } else if (type === '}') { - this.tokenizer.back(tokens.pop()) - end = true - break - } else if (type === ':') { - colon = true - } - } else if (type === brackets[brackets.length - 1]) { - brackets.pop() - if (brackets.length === 0) bracket = null - } - - token = this.tokenizer.nextToken() - } - - if (this.tokenizer.endOfFile()) end = true - if (brackets.length > 0) this.unclosedBracket(bracket) - - if (end && colon) { - if (!customProperty) { - while (tokens.length) { - token = tokens[tokens.length - 1][0] - if (token !== 'space' && token !== 'comment') break - this.tokenizer.back(tokens.pop()) - } - } - this.decl(tokens, customProperty) - } else { - this.unknownWord(tokens) - } - } - - parse() { - let token - while (!this.tokenizer.endOfFile()) { - token = this.tokenizer.nextToken() - - switch (token[0]) { - case 'space': - this.spaces += token[1] - break - - case ';': - this.freeSemicolon(token) - break - - case '}': - this.end(token) - break - - case 'comment': - this.comment(token) - break - - case 'at-word': - this.atrule(token) - break - - case '{': - this.emptyRule(token) - break - - default: - this.other(token) - break - } - } - this.endFile() - } - - precheckMissedSemicolon(/* tokens */) { - // Hook for Safe Parser - } - - raw(node, prop, tokens, customProperty) { - let token, type - let length = tokens.length - let value = '' - let clean = true - let next, prev - - for (let i = 0; i < length; i += 1) { - token = tokens[i] - type = token[0] - if (type === 'space' && i === length - 1 && !customProperty) { - clean = false - } else if (type === 'comment') { - prev = tokens[i - 1] ? tokens[i - 1][0] : 'empty' - next = tokens[i + 1] ? 
tokens[i + 1][0] : 'empty' - if (!SAFE_COMMENT_NEIGHBOR[prev] && !SAFE_COMMENT_NEIGHBOR[next]) { - if (value.slice(-1) === ',') { - clean = false - } else { - value += token[1] - } - } else { - clean = false - } - } else { - value += token[1] - } - } - if (!clean) { - let raw = tokens.reduce((all, i) => all + i[1], '') - node.raws[prop] = { raw, value } - } - node[prop] = value - } - - rule(tokens) { - tokens.pop() - - let node = new Rule() - this.init(node, tokens[0][2]) - - node.raws.between = this.spacesAndCommentsFromEnd(tokens) - this.raw(node, 'selector', tokens) - this.current = node - } - - spacesAndCommentsFromEnd(tokens) { - let lastTokenType - let spaces = '' - while (tokens.length) { - lastTokenType = tokens[tokens.length - 1][0] - if (lastTokenType !== 'space' && lastTokenType !== 'comment') break - spaces = tokens.pop()[1] + spaces - } - return spaces - } - - // Errors - - spacesAndCommentsFromStart(tokens) { - let next - let spaces = '' - while (tokens.length) { - next = tokens[0][0] - if (next !== 'space' && next !== 'comment') break - spaces += tokens.shift()[1] - } - return spaces - } - - spacesFromEnd(tokens) { - let lastTokenType - let spaces = '' - while (tokens.length) { - lastTokenType = tokens[tokens.length - 1][0] - if (lastTokenType !== 'space') break - spaces = tokens.pop()[1] + spaces - } - return spaces - } - - stringFrom(tokens, from) { - let result = '' - for (let i = from; i < tokens.length; i++) { - result += tokens[i][1] - } - tokens.splice(from, tokens.length - from) - return result - } - - unclosedBlock() { - let pos = this.current.source.start - throw this.input.error('Unclosed block', pos.line, pos.column) - } - - unclosedBracket(bracket) { - throw this.input.error( - 'Unclosed bracket', - { offset: bracket[2] }, - { offset: bracket[2] + 1 } - ) - } - - unexpectedClose(token) { - throw this.input.error( - 'Unexpected }', - { offset: token[2] }, - { offset: token[2] + 1 } - ) - } - - unknownWord(tokens) { - throw this.input.error( - 'Unknown word', - { offset: tokens[0][2] }, - { offset: tokens[0][2] + tokens[0][1].length } - ) - } - - unnamedAtrule(node, token) { - throw this.input.error( - 'At-rule without name', - { offset: token[2] }, - { offset: token[2] + token[1].length } - ) - } -} - -module.exports = Parser diff --git a/node_modules/postcss/lib/postcss.d.mts b/node_modules/postcss/lib/postcss.d.mts deleted file mode 100644 index 4cf5b49..0000000 --- a/node_modules/postcss/lib/postcss.d.mts +++ /dev/null @@ -1,69 +0,0 @@ -export { - // postcss function / namespace - default, - - // Value exports from postcss.mjs - stringify, - fromJSON, - // @ts-expect-error This value exists, but it’s untyped. - plugin, - parse, - list, - document, - comment, - atRule, - rule, - decl, - root, - CssSyntaxError, - Declaration, - Container, - Processor, - Document, - Comment, - Warning, - AtRule, - Result, - Input, - Rule, - Root, - Node, - - // Type-only exports - AcceptedPlugin, - AnyNode, - AtRuleProps, - Builder, - ChildNode, - ChildProps, - CommentProps, - ContainerProps, - DeclarationProps, - DocumentProps, - FilePosition, - Helpers, - JSONHydrator, - Message, - NodeErrorOptions, - NodeProps, - OldPlugin, - Parser, - Plugin, - PluginCreator, - Position, - Postcss, - ProcessOptions, - RootProps, - RuleProps, - Source, - SourceMap, - SourceMapOptions, - Stringifier, - Syntax, - TransformCallback, - Transformer, - WarningOptions, - - // This is a class, but it’s not re-exported. That’s why it’s exported as type-only here. 
- type LazyResult -} from './postcss.js' diff --git a/node_modules/postcss/lib/postcss.d.ts b/node_modules/postcss/lib/postcss.d.ts deleted file mode 100644 index d0b8b53..0000000 --- a/node_modules/postcss/lib/postcss.d.ts +++ /dev/null @@ -1,453 +0,0 @@ -import { RawSourceMap, SourceMapGenerator } from 'source-map-js' - -import AtRule, { AtRuleProps } from './at-rule.js' -import Comment, { CommentProps } from './comment.js' -import Container, { ContainerProps, NewChild } from './container.js' -import CssSyntaxError from './css-syntax-error.js' -import Declaration, { DeclarationProps } from './declaration.js' -import Document, { DocumentProps } from './document.js' -import Input, { FilePosition } from './input.js' -import LazyResult from './lazy-result.js' -import list from './list.js' -import Node, { - AnyNode, - ChildNode, - ChildProps, - NodeErrorOptions, - NodeProps, - Position, - Source -} from './node.js' -import Processor from './processor.js' -import Result, { Message } from './result.js' -import Root, { RootProps } from './root.js' -import Rule, { RuleProps } from './rule.js' -import Warning, { WarningOptions } from './warning.js' - -type DocumentProcessor = ( - document: Document, - helper: postcss.Helpers -) => Promise | void -type RootProcessor = ( - root: Root, - helper: postcss.Helpers -) => Promise | void -type DeclarationProcessor = ( - decl: Declaration, - helper: postcss.Helpers -) => Promise | void -type RuleProcessor = ( - rule: Rule, - helper: postcss.Helpers -) => Promise | void -type AtRuleProcessor = ( - atRule: AtRule, - helper: postcss.Helpers -) => Promise | void -type CommentProcessor = ( - comment: Comment, - helper: postcss.Helpers -) => Promise | void - -interface Processors { - /** - * Will be called on all`AtRule` nodes. - * - * Will be called again on node or children changes. - */ - AtRule?: { [name: string]: AtRuleProcessor } | AtRuleProcessor - - /** - * Will be called on all `AtRule` nodes, when all children will be processed. - * - * Will be called again on node or children changes. - */ - AtRuleExit?: { [name: string]: AtRuleProcessor } | AtRuleProcessor - - /** - * Will be called on all `Comment` nodes. - * - * Will be called again on node or children changes. - */ - Comment?: CommentProcessor - - /** - * Will be called on all `Comment` nodes after listeners - * for `Comment` event. - * - * Will be called again on node or children changes. - */ - CommentExit?: CommentProcessor - - /** - * Will be called on all `Declaration` nodes after listeners - * for `Declaration` event. - * - * Will be called again on node or children changes. - */ - Declaration?: { [prop: string]: DeclarationProcessor } | DeclarationProcessor - - /** - * Will be called on all `Declaration` nodes. - * - * Will be called again on node or children changes. - */ - DeclarationExit?: - | { [prop: string]: DeclarationProcessor } - | DeclarationProcessor - - /** - * Will be called on `Document` node. - * - * Will be called again on children changes. - */ - Document?: DocumentProcessor - - /** - * Will be called on `Document` node, when all children will be processed. - * - * Will be called again on children changes. - */ - DocumentExit?: DocumentProcessor - - /** - * Will be called on `Root` node once. - */ - Once?: RootProcessor - - /** - * Will be called on `Root` node once, when all children will be processed. - */ - OnceExit?: RootProcessor - - /** - * Will be called on `Root` node. - * - * Will be called again on children changes. 
- */ - Root?: RootProcessor - - /** - * Will be called on `Root` node, when all children will be processed. - * - * Will be called again on children changes. - */ - RootExit?: RootProcessor - - /** - * Will be called on all `Rule` nodes. - * - * Will be called again on node or children changes. - */ - Rule?: RuleProcessor - - /** - * Will be called on all `Rule` nodes, when all children will be processed. - * - * Will be called again on node or children changes. - */ - RuleExit?: RuleProcessor -} - -declare namespace postcss { - export { - AnyNode, - AtRule, - AtRuleProps, - ChildNode, - ChildProps, - Comment, - CommentProps, - Container, - ContainerProps, - CssSyntaxError, - Declaration, - DeclarationProps, - Document, - DocumentProps, - FilePosition, - Input, - LazyResult, - list, - Message, - NewChild, - Node, - NodeErrorOptions, - NodeProps, - Position, - Processor, - Result, - Root, - RootProps, - Rule, - RuleProps, - Source, - Warning, - WarningOptions - } - - export type SourceMap = { - toJSON(): RawSourceMap - } & SourceMapGenerator - - export type Helpers = { postcss: Postcss; result: Result } & Postcss - - export interface Plugin extends Processors { - postcssPlugin: string - prepare?: (result: Result) => Processors - } - - export interface PluginCreator { - (opts?: PluginOptions): Plugin | Processor - postcss: true - } - - export interface Transformer extends TransformCallback { - postcssPlugin: string - postcssVersion: string - } - - export interface TransformCallback { - (root: Root, result: Result): Promise | void - } - - export interface OldPlugin extends Transformer { - (opts?: T): Transformer - postcss: Transformer - } - - export type AcceptedPlugin = - | { - postcss: Processor | TransformCallback - } - | OldPlugin - | Plugin - | PluginCreator - | Processor - | TransformCallback - - export interface Parser { - ( - css: { toString(): string } | string, - opts?: Pick - ): RootNode - } - - export interface Builder { - (part: string, node?: AnyNode, type?: 'end' | 'start'): void - } - - export interface Stringifier { - (node: AnyNode, builder: Builder): void - } - - export interface JSONHydrator { - (data: object): Node - (data: object[]): Node[] - } - - export interface Syntax { - /** - * Function to generate AST by string. - */ - parse?: Parser - - /** - * Class to generate string by AST. - */ - stringify?: Stringifier - } - - export interface SourceMapOptions { - /** - * Use absolute path in generated source map. - */ - absolute?: boolean - - /** - * Indicates that PostCSS should add annotation comments to the CSS. - * By default, PostCSS will always add a comment with a path - * to the source map. PostCSS will not add annotations to CSS files - * that do not contain any comments. - * - * By default, PostCSS presumes that you want to save the source map as - * `opts.to + '.map'` and will use this path in the annotation comment. - * A different path can be set by providing a string value for annotation. - * - * If you have set `inline: true`, annotation cannot be disabled. - */ - annotation?: ((file: string, root: Root) => string) | boolean | string - - /** - * Override `from` in map’s sources. - */ - from?: string - - /** - * Indicates that the source map should be embedded in the output CSS - * as a Base64-encoded comment. By default, it is `true`. - * But if all previous maps are external, not inline, PostCSS will not embed - * the map even if you do not set this option. 
- * - * If you have an inline source map, the result.map property will be empty, - * as the source map will be contained within the text of `result.css`. - */ - inline?: boolean - - /** - * Source map content from a previous processing step (e.g., Sass). - * - * PostCSS will try to read the previous source map - * automatically (based on comments within the source CSS), but you can use - * this option to identify it manually. - * - * If desired, you can omit the previous map with prev: `false`. - */ - prev?: ((file: string) => string) | boolean | object | string - - /** - * Indicates that PostCSS should set the origin content (e.g., Sass source) - * of the source map. By default, it is true. But if all previous maps do not - * contain sources content, PostCSS will also leave it out even if you - * do not set this option. - */ - sourcesContent?: boolean - } - - export interface ProcessOptions { - /** - * The path of the CSS source file. You should always set `from`, - * because it is used in source map generation and syntax error messages. - */ - from?: string | undefined - - /** - * Source map options - */ - map?: boolean | SourceMapOptions - - /** - * Function to generate AST by string. - */ - parser?: Parser | Syntax - - /** - * Class to generate string by AST. - */ - stringifier?: Stringifier | Syntax - - /** - * Object with parse and stringify. - */ - syntax?: Syntax - - /** - * The path where you'll put the output CSS file. You should always set `to` - * to generate correct source maps. - */ - to?: string - } - - export type Postcss = typeof postcss - - /** - * Default function to convert a node tree into a CSS string. - */ - export let stringify: Stringifier - - /** - * Parses source css and returns a new `Root` or `Document` node, - * which contains the source CSS nodes. - * - * ```js - * // Simple CSS concatenation with source map support - * const root1 = postcss.parse(css1, { from: file1 }) - * const root2 = postcss.parse(css2, { from: file2 }) - * root1.append(root2).toResult().css - * ``` - */ - export let parse: Parser - - /** - * Rehydrate a JSON AST (from `Node#toJSON`) back into the AST classes. - * - * ```js - * const json = root.toJSON() - * // save to file, send by network, etc - * const root2 = postcss.fromJSON(json) - * ``` - */ - export let fromJSON: JSONHydrator - - /** - * Creates a new `Comment` node. - * - * @param defaults Properties for the new node. - * @return New comment node - */ - export function comment(defaults?: CommentProps): Comment - - /** - * Creates a new `AtRule` node. - * - * @param defaults Properties for the new node. - * @return New at-rule node. - */ - export function atRule(defaults?: AtRuleProps): AtRule - - /** - * Creates a new `Declaration` node. - * - * @param defaults Properties for the new node. - * @return New declaration node. - */ - export function decl(defaults?: DeclarationProps): Declaration - - /** - * Creates a new `Rule` node. - * - * @param default Properties for the new node. - * @return New rule node. - */ - export function rule(defaults?: RuleProps): Rule - - /** - * Creates a new `Root` node. - * - * @param defaults Properties for the new node. - * @return New root node. - */ - export function root(defaults?: RootProps): Root - - /** - * Creates a new `Document` node. - * - * @param defaults Properties for the new node. - * @return New document node. 
- */ - export function document(defaults?: DocumentProps): Document - - export { postcss as default } -} - -/** - * Create a new `Processor` instance that will apply `plugins` - * as CSS processors. - * - * ```js - * let postcss = require('postcss') - * - * postcss(plugins).process(css, { from, to }).then(result => { - * console.log(result.css) - * }) - * ``` - * - * @param plugins PostCSS plugins. - * @return Processor to process multiple CSS. - */ -declare function postcss( - plugins?: readonly postcss.AcceptedPlugin[] -): Processor -declare function postcss(...plugins: postcss.AcceptedPlugin[]): Processor - -export = postcss diff --git a/node_modules/postcss/lib/postcss.js b/node_modules/postcss/lib/postcss.js deleted file mode 100644 index 8f0773b..0000000 --- a/node_modules/postcss/lib/postcss.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict' - -let AtRule = require('./at-rule') -let Comment = require('./comment') -let Container = require('./container') -let CssSyntaxError = require('./css-syntax-error') -let Declaration = require('./declaration') -let Document = require('./document') -let fromJSON = require('./fromJSON') -let Input = require('./input') -let LazyResult = require('./lazy-result') -let list = require('./list') -let Node = require('./node') -let parse = require('./parse') -let Processor = require('./processor') -let Result = require('./result.js') -let Root = require('./root') -let Rule = require('./rule') -let stringify = require('./stringify') -let Warning = require('./warning') - -function postcss(...plugins) { - if (plugins.length === 1 && Array.isArray(plugins[0])) { - plugins = plugins[0] - } - return new Processor(plugins) -} - -postcss.plugin = function plugin(name, initializer) { - let warningPrinted = false - function creator(...args) { - // eslint-disable-next-line no-console - if (console && console.warn && !warningPrinted) { - warningPrinted = true - // eslint-disable-next-line no-console - console.warn( - name + - ': postcss.plugin was deprecated. Migration guide:\n' + - 'https://evilmartians.com/chronicles/postcss-8-plugin-migration' - ) - if (process.env.LANG && process.env.LANG.startsWith('cn')) { - /* c8 ignore next 7 */ - // eslint-disable-next-line no-console - console.warn( - name + - ': 里面 postcss.plugin 被弃用. 
迁移指南:\n' + - 'https://www.w3ctech.com/topic/2226' - ) - } - } - let transformer = initializer(...args) - transformer.postcssPlugin = name - transformer.postcssVersion = new Processor().version - return transformer - } - - let cache - Object.defineProperty(creator, 'postcss', { - get() { - if (!cache) cache = creator() - return cache - } - }) - - creator.process = function (css, processOpts, pluginOpts) { - return postcss([creator(pluginOpts)]).process(css, processOpts) - } - - return creator -} - -postcss.stringify = stringify -postcss.parse = parse -postcss.fromJSON = fromJSON -postcss.list = list - -postcss.comment = defaults => new Comment(defaults) -postcss.atRule = defaults => new AtRule(defaults) -postcss.decl = defaults => new Declaration(defaults) -postcss.rule = defaults => new Rule(defaults) -postcss.root = defaults => new Root(defaults) -postcss.document = defaults => new Document(defaults) - -postcss.CssSyntaxError = CssSyntaxError -postcss.Declaration = Declaration -postcss.Container = Container -postcss.Processor = Processor -postcss.Document = Document -postcss.Comment = Comment -postcss.Warning = Warning -postcss.AtRule = AtRule -postcss.Result = Result -postcss.Input = Input -postcss.Rule = Rule -postcss.Root = Root -postcss.Node = Node - -LazyResult.registerPostcss(postcss) - -module.exports = postcss -postcss.default = postcss diff --git a/node_modules/postcss/lib/postcss.mjs b/node_modules/postcss/lib/postcss.mjs deleted file mode 100644 index 3507598..0000000 --- a/node_modules/postcss/lib/postcss.mjs +++ /dev/null @@ -1,30 +0,0 @@ -import postcss from './postcss.js' - -export default postcss - -export const stringify = postcss.stringify -export const fromJSON = postcss.fromJSON -export const plugin = postcss.plugin -export const parse = postcss.parse -export const list = postcss.list - -export const document = postcss.document -export const comment = postcss.comment -export const atRule = postcss.atRule -export const rule = postcss.rule -export const decl = postcss.decl -export const root = postcss.root - -export const CssSyntaxError = postcss.CssSyntaxError -export const Declaration = postcss.Declaration -export const Container = postcss.Container -export const Processor = postcss.Processor -export const Document = postcss.Document -export const Comment = postcss.Comment -export const Warning = postcss.Warning -export const AtRule = postcss.AtRule -export const Result = postcss.Result -export const Input = postcss.Input -export const Rule = postcss.Rule -export const Root = postcss.Root -export const Node = postcss.Node diff --git a/node_modules/postcss/lib/previous-map.d.ts b/node_modules/postcss/lib/previous-map.d.ts deleted file mode 100644 index 23edeb5..0000000 --- a/node_modules/postcss/lib/previous-map.d.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { SourceMapConsumer } from 'source-map-js' - -import { ProcessOptions } from './postcss.js' - -declare namespace PreviousMap { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { PreviousMap_ as default } -} - -/** - * Source map information from input CSS. - * For example, source map after Sass compiler. - * - * This class will automatically find source map in input CSS or in file system - * near input file (according `from` option). - * - * ```js - * const root = parse(css, { from: 'a.sass.css' }) - * root.input.map //=> PreviousMap - * ``` - */ -declare class PreviousMap_ { - /** - * `sourceMappingURL` content. - */ - annotation?: string - - /** - * The CSS source identifier. 
Contains `Input#file` if the user - * set the `from` option, or `Input#id` if they did not. - */ - file?: string - - /** - * Was source map inlined by data-uri to input CSS. - */ - inline: boolean - - /** - * Path to source map file. - */ - mapFile?: string - - /** - * The directory with source map file, if source map is in separated file. - */ - root?: string - - /** - * Source map file content. - */ - text?: string - - /** - * @param css Input CSS source. - * @param opts Process options. - */ - constructor(css: string, opts?: ProcessOptions) - - /** - * Create a instance of `SourceMapGenerator` class - * from the `source-map` library to work with source map information. - * - * It is lazy method, so it will create object only on first call - * and then it will use cache. - * - * @return Object with source map information. - */ - consumer(): SourceMapConsumer - - /** - * Does source map contains `sourcesContent` with input source text. - * - * @return Is `sourcesContent` present. - */ - withContent(): boolean -} - -declare class PreviousMap extends PreviousMap_ {} - -export = PreviousMap diff --git a/node_modules/postcss/lib/previous-map.js b/node_modules/postcss/lib/previous-map.js deleted file mode 100644 index b123dcd..0000000 --- a/node_modules/postcss/lib/previous-map.js +++ /dev/null @@ -1,144 +0,0 @@ -'use strict' - -let { existsSync, readFileSync } = require('fs') -let { dirname, join } = require('path') -let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') - -function fromBase64(str) { - if (Buffer) { - return Buffer.from(str, 'base64').toString() - } else { - /* c8 ignore next 2 */ - return window.atob(str) - } -} - -class PreviousMap { - constructor(css, opts) { - if (opts.map === false) return - this.loadAnnotation(css) - this.inline = this.startWith(this.annotation, 'data:') - - let prev = opts.map ? opts.map.prev : undefined - let text = this.loadMap(opts.from, prev) - if (!this.mapFile && opts.from) { - this.mapFile = opts.from - } - if (this.mapFile) this.root = dirname(this.mapFile) - if (text) this.text = text - } - - consumer() { - if (!this.consumerCache) { - this.consumerCache = new SourceMapConsumer(this.text) - } - return this.consumerCache - } - - decodeInline(text) { - let baseCharsetUri = /^data:application\/json;charset=utf-?8;base64,/ - let baseUri = /^data:application\/json;base64,/ - let charsetUri = /^data:application\/json;charset=utf-?8,/ - let uri = /^data:application\/json,/ - - let uriMatch = text.match(charsetUri) || text.match(uri) - if (uriMatch) { - return decodeURIComponent(text.substr(uriMatch[0].length)) - } - - let baseUriMatch = text.match(baseCharsetUri) || text.match(baseUri) - if (baseUriMatch) { - return fromBase64(text.substr(baseUriMatch[0].length)) - } - - let encoding = text.match(/data:application\/json;([^,]+),/)[1] - throw new Error('Unsupported source map encoding ' + encoding) - } - - getAnnotationURL(sourceMapString) { - return sourceMapString.replace(/^\/\*\s*# sourceMappingURL=/, '').trim() - } - - isMap(map) { - if (typeof map !== 'object') return false - return ( - typeof map.mappings === 'string' || - typeof map._mappings === 'string' || - Array.isArray(map.sections) - ) - } - - loadAnnotation(css) { - let comments = css.match(/\/\*\s*# sourceMappingURL=/g) - if (!comments) return - - // sourceMappingURLs from comments, strings, etc. 
- let start = css.lastIndexOf(comments.pop()) - let end = css.indexOf('*/', start) - - if (start > -1 && end > -1) { - // Locate the last sourceMappingURL to avoid pickin - this.annotation = this.getAnnotationURL(css.substring(start, end)) - } - } - - loadFile(path) { - this.root = dirname(path) - if (existsSync(path)) { - this.mapFile = path - return readFileSync(path, 'utf-8').toString().trim() - } - } - - loadMap(file, prev) { - if (prev === false) return false - - if (prev) { - if (typeof prev === 'string') { - return prev - } else if (typeof prev === 'function') { - let prevPath = prev(file) - if (prevPath) { - let map = this.loadFile(prevPath) - if (!map) { - throw new Error( - 'Unable to load previous source map: ' + prevPath.toString() - ) - } - return map - } - } else if (prev instanceof SourceMapConsumer) { - return SourceMapGenerator.fromSourceMap(prev).toString() - } else if (prev instanceof SourceMapGenerator) { - return prev.toString() - } else if (this.isMap(prev)) { - return JSON.stringify(prev) - } else { - throw new Error( - 'Unsupported previous source map format: ' + prev.toString() - ) - } - } else if (this.inline) { - return this.decodeInline(this.annotation) - } else if (this.annotation) { - let map = this.annotation - if (file) map = join(dirname(file), map) - return this.loadFile(map) - } - } - - startWith(string, start) { - if (!string) return false - return string.substr(0, start.length) === start - } - - withContent() { - return !!( - this.consumer().sourcesContent && - this.consumer().sourcesContent.length > 0 - ) - } -} - -module.exports = PreviousMap -PreviousMap.default = PreviousMap diff --git a/node_modules/postcss/lib/processor.d.ts b/node_modules/postcss/lib/processor.d.ts deleted file mode 100644 index aa2942c..0000000 --- a/node_modules/postcss/lib/processor.d.ts +++ /dev/null @@ -1,115 +0,0 @@ -import Document from './document.js' -import LazyResult from './lazy-result.js' -import NoWorkResult from './no-work-result.js' -import { - AcceptedPlugin, - Plugin, - ProcessOptions, - TransformCallback, - Transformer -} from './postcss.js' -import Result from './result.js' -import Root from './root.js' - -declare namespace Processor { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Processor_ as default } -} - -/** - * Contains plugins to process CSS. Create one `Processor` instance, - * initialize its plugins, and then use that instance on numerous CSS files. - * - * ```js - * const processor = postcss([autoprefixer, postcssNested]) - * processor.process(css1).then(result => console.log(result.css)) - * processor.process(css2).then(result => console.log(result.css)) - * ``` - */ -declare class Processor_ { - /** - * Plugins added to this processor. - * - * ```js - * const processor = postcss([autoprefixer, postcssNested]) - * processor.plugins.length //=> 2 - * ``` - */ - plugins: (Plugin | TransformCallback | Transformer)[] - - /** - * Current PostCSS version. - * - * ```js - * if (result.processor.version.split('.')[0] !== '6') { - * throw new Error('This plugin works only with PostCSS 6') - * } - * ``` - */ - version: string - - /** - * @param plugins PostCSS plugins - */ - constructor(plugins?: readonly AcceptedPlugin[]) - - /** - * Parses source CSS and returns a `LazyResult` Promise proxy. - * Because some plugins can be asynchronous it doesn’t make - * any transformations. Transformations will be applied - * in the `LazyResult` methods. 
- * - * ```js - * processor.process(css, { from: 'a.css', to: 'a.out.css' }) - * .then(result => { - * console.log(result.css) - * }) - * ``` - * - * @param css String with input CSS or any object with a `toString()` method, - * like a Buffer. Optionally, send a `Result` instance - * and the processor will take the `Root` from it. - * @param opts Options. - * @return Promise proxy. - */ - process( - css: { toString(): string } | LazyResult | Result | Root | string - ): LazyResult | NoWorkResult - process( - css: { toString(): string } | LazyResult | Result | Root | string, - options: ProcessOptions - ): LazyResult - - /** - * Adds a plugin to be used as a CSS processor. - * - * PostCSS plugin can be in 4 formats: - * * A plugin in `Plugin` format. - * * A plugin creator function with `pluginCreator.postcss = true`. - * PostCSS will call this function without argument to get plugin. - * * A function. PostCSS will pass the function a {@link Root} - * as the first argument and current `Result` instance - * as the second. - * * Another `Processor` instance. PostCSS will copy plugins - * from that instance into this one. - * - * Plugins can also be added by passing them as arguments when creating - * a `postcss` instance (see [`postcss(plugins)`]). - * - * Asynchronous plugins should return a `Promise` instance. - * - * ```js - * const processor = postcss() - * .use(autoprefixer) - * .use(postcssNested) - * ``` - * - * @param plugin PostCSS plugin or `Processor` with plugins. - * @return Current processor to make methods chain. - */ - use(plugin: AcceptedPlugin): this -} - -declare class Processor extends Processor_ {} - -export = Processor diff --git a/node_modules/postcss/lib/processor.js b/node_modules/postcss/lib/processor.js deleted file mode 100644 index d6192ab..0000000 --- a/node_modules/postcss/lib/processor.js +++ /dev/null @@ -1,67 +0,0 @@ -'use strict' - -let Document = require('./document') -let LazyResult = require('./lazy-result') -let NoWorkResult = require('./no-work-result') -let Root = require('./root') - -class Processor { - constructor(plugins = []) { - this.version = '8.4.49' - this.plugins = this.normalize(plugins) - } - - normalize(plugins) { - let normalized = [] - for (let i of plugins) { - if (i.postcss === true) { - i = i() - } else if (i.postcss) { - i = i.postcss - } - - if (typeof i === 'object' && Array.isArray(i.plugins)) { - normalized = normalized.concat(i.plugins) - } else if (typeof i === 'object' && i.postcssPlugin) { - normalized.push(i) - } else if (typeof i === 'function') { - normalized.push(i) - } else if (typeof i === 'object' && (i.parse || i.stringify)) { - if (process.env.NODE_ENV !== 'production') { - throw new Error( - 'PostCSS syntaxes cannot be used as plugins. Instead, please use ' + - 'one of the syntax/parser/stringifier options as outlined ' + - 'in your PostCSS runner documentation.' 
- ) - } - } else { - throw new Error(i + ' is not a PostCSS plugin') - } - } - return normalized - } - - process(css, opts = {}) { - if ( - !this.plugins.length && - !opts.parser && - !opts.stringifier && - !opts.syntax - ) { - return new NoWorkResult(this, css, opts) - } else { - return new LazyResult(this, css, opts) - } - } - - use(plugin) { - this.plugins = this.plugins.concat(this.normalize([plugin])) - return this - } -} - -module.exports = Processor -Processor.default = Processor - -Root.registerProcessor(Processor) -Document.registerProcessor(Processor) diff --git a/node_modules/postcss/lib/result.d.ts b/node_modules/postcss/lib/result.d.ts deleted file mode 100644 index 40e086f..0000000 --- a/node_modules/postcss/lib/result.d.ts +++ /dev/null @@ -1,205 +0,0 @@ -import { - Document, - Node, - Plugin, - ProcessOptions, - Root, - SourceMap, - TransformCallback, - Warning, - WarningOptions -} from './postcss.js' -import Processor from './processor.js' - -declare namespace Result { - export interface Message { - [others: string]: any - - /** - * Source PostCSS plugin name. - */ - plugin?: string - - /** - * Message type. - */ - type: string - } - - export interface ResultOptions extends ProcessOptions { - /** - * The CSS node that was the source of the warning. - */ - node?: Node - - /** - * Name of plugin that created this warning. `Result#warn` will fill it - * automatically with `Plugin#postcssPlugin` value. - */ - plugin?: string - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Result_ as default } -} - -/** - * Provides the result of the PostCSS transformations. - * - * A Result instance is returned by `LazyResult#then` - * or `Root#toResult` methods. - * - * ```js - * postcss([autoprefixer]).process(css).then(result => { - * console.log(result.css) - * }) - * ``` - * - * ```js - * const result2 = postcss.parse(css).toResult() - * ``` - */ -declare class Result_ { - /** - * A CSS string representing of `Result#root`. - * - * ```js - * postcss.parse('a{}').toResult().css //=> "a{}" - * ``` - */ - css: string - - /** - * Last runned PostCSS plugin. - */ - lastPlugin: Plugin | TransformCallback - - /** - * An instance of `SourceMapGenerator` class from the `source-map` library, - * representing changes to the `Result#root` instance. - * - * ```js - * result.map.toJSON() //=> { version: 3, file: 'a.css', … } - * ``` - * - * ```js - * if (result.map) { - * fs.writeFileSync(result.opts.to + '.map', result.map.toString()) - * } - * ``` - */ - map: SourceMap - - /** - * Contains messages from plugins (e.g., warnings or custom messages). - * Each message should have type and plugin properties. - * - * ```js - * AtRule: { - * import: (atRule, { result }) { - * const importedFile = parseImport(atRule) - * result.messages.push({ - * type: 'dependency', - * plugin: 'postcss-import', - * file: importedFile, - * parent: result.opts.from - * }) - * } - * } - * ``` - */ - messages: Result.Message[] - - /** - * Options from the `Processor#process` or `Root#toResult` call - * that produced this Result instance.] - * - * ```js - * root.toResult(opts).opts === opts - * ``` - */ - opts: Result.ResultOptions - - /** - * The Processor instance used for this transformation. - * - * ```js - * for (const plugin of result.processor.plugins) { - * if (plugin.postcssPlugin === 'postcss-bad') { - * throw 'postcss-good is incompatible with postcss-bad' - * } - * }) - * ``` - */ - processor: Processor - - /** - * Root node after all transformations. 
- * - * ```js - * root.toResult().root === root - * ``` - */ - root: RootNode - - /** - * @param processor Processor used for this transformation. - * @param root Root node after all transformations. - * @param opts Options from the `Processor#process` or `Root#toResult`. - */ - constructor(processor: Processor, root: RootNode, opts: Result.ResultOptions) - - /** - * Returns for `Result#css` content. - * - * ```js - * result + '' === result.css - * ``` - * - * @return String representing of `Result#root`. - */ - toString(): string - - /** - * Creates an instance of `Warning` and adds it to `Result#messages`. - * - * ```js - * if (decl.important) { - * result.warn('Avoid !important', { node: decl, word: '!important' }) - * } - * ``` - * - * @param text Warning message. - * @param opts Warning options. - * @return Created warning. - */ - warn(message: string, options?: WarningOptions): Warning - - /** - * Returns warnings from plugins. Filters `Warning` instances - * from `Result#messages`. - * - * ```js - * result.warnings().forEach(warn => { - * console.warn(warn.toString()) - * }) - * ``` - * - * @return Warnings from plugins. - */ - warnings(): Warning[] - - /** - * An alias for the `Result#css` property. - * Use it with syntaxes that generate non-CSS output. - * - * ```js - * result.css === result.content - * ``` - */ - get content(): string -} - -declare class Result extends Result_ {} - -export = Result diff --git a/node_modules/postcss/lib/result.js b/node_modules/postcss/lib/result.js deleted file mode 100644 index a39751d..0000000 --- a/node_modules/postcss/lib/result.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -let Warning = require('./warning') - -class Result { - constructor(processor, root, opts) { - this.processor = processor - this.messages = [] - this.root = root - this.opts = opts - this.css = undefined - this.map = undefined - } - - toString() { - return this.css - } - - warn(text, opts = {}) { - if (!opts.plugin) { - if (this.lastPlugin && this.lastPlugin.postcssPlugin) { - opts.plugin = this.lastPlugin.postcssPlugin - } - } - - let warning = new Warning(text, opts) - this.messages.push(warning) - - return warning - } - - warnings() { - return this.messages.filter(i => i.type === 'warning') - } - - get content() { - return this.css - } -} - -module.exports = Result -Result.default = Result diff --git a/node_modules/postcss/lib/root.d.ts b/node_modules/postcss/lib/root.d.ts deleted file mode 100644 index 5c91139..0000000 --- a/node_modules/postcss/lib/root.d.ts +++ /dev/null @@ -1,87 +0,0 @@ -import Container, { ContainerProps } from './container.js' -import Document from './document.js' -import { ProcessOptions } from './postcss.js' -import Result from './result.js' - -declare namespace Root { - export interface RootRaws extends Record { - /** - * The space symbols after the last child to the end of file. - */ - after?: string - - /** - * Non-CSS code after `Root`, when `Root` is inside `Document`. - * - * **Experimental:** some aspects of this node could change within minor - * or patch version releases. - */ - codeAfter?: string - - /** - * Non-CSS code before `Root`, when `Root` is inside `Document`. - * - * **Experimental:** some aspects of this node could change within minor - * or patch version releases. - */ - codeBefore?: string - - /** - * Is the last child has an (optional) semicolon. 
- */ - semicolon?: boolean - } - - export interface RootProps extends ContainerProps { - /** - * Information used to generate byte-to-byte equal node string - * as it was in the origin input. - * */ - raws?: RootRaws - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Root_ as default } -} - -/** - * Represents a CSS file and contains all its parsed nodes. - * - * ```js - * const root = postcss.parse('a{color:black} b{z-index:2}') - * root.type //=> 'root' - * root.nodes.length //=> 2 - * ``` - */ -declare class Root_ extends Container { - nodes: NonNullable - parent: Document | undefined - raws: Root.RootRaws - type: 'root' - - constructor(defaults?: Root.RootProps) - - assign(overrides: object | Root.RootProps): this - clone(overrides?: Partial): this - cloneAfter(overrides?: Partial): this - cloneBefore(overrides?: Partial): this - - /** - * Returns a `Result` instance representing the root’s CSS. - * - * ```js - * const root1 = postcss.parse(css1, { from: 'a.css' }) - * const root2 = postcss.parse(css2, { from: 'b.css' }) - * root1.append(root2) - * const result = root1.toResult({ to: 'all.css', map: true }) - * ``` - * - * @param options Options. - * @return Result with current root’s CSS. - */ - toResult(options?: ProcessOptions): Result -} - -declare class Root extends Root_ {} - -export = Root diff --git a/node_modules/postcss/lib/root.js b/node_modules/postcss/lib/root.js deleted file mode 100644 index ea574ed..0000000 --- a/node_modules/postcss/lib/root.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict' - -let Container = require('./container') - -let LazyResult, Processor - -class Root extends Container { - constructor(defaults) { - super(defaults) - this.type = 'root' - if (!this.nodes) this.nodes = [] - } - - normalize(child, sample, type) { - let nodes = super.normalize(child) - - if (sample) { - if (type === 'prepend') { - if (this.nodes.length > 1) { - sample.raws.before = this.nodes[1].raws.before - } else { - delete sample.raws.before - } - } else if (this.first !== sample) { - for (let node of nodes) { - node.raws.before = sample.raws.before - } - } - } - - return nodes - } - - removeChild(child, ignore) { - let index = this.index(child) - - if (!ignore && index === 0 && this.nodes.length > 1) { - this.nodes[1].raws.before = this.nodes[index].raws.before - } - - return super.removeChild(child) - } - - toResult(opts = {}) { - let lazy = new LazyResult(new Processor(), this, opts) - return lazy.stringify() - } -} - -Root.registerLazyResult = dependant => { - LazyResult = dependant -} - -Root.registerProcessor = dependant => { - Processor = dependant -} - -module.exports = Root -Root.default = Root - -Container.registerRoot(Root) diff --git a/node_modules/postcss/lib/rule.d.ts b/node_modules/postcss/lib/rule.d.ts deleted file mode 100644 index da8aae7..0000000 --- a/node_modules/postcss/lib/rule.d.ts +++ /dev/null @@ -1,126 +0,0 @@ -import Container, { - ContainerProps, - ContainerWithChildren -} from './container.js' - -declare namespace Rule { - export interface RuleRaws extends Record { - /** - * The space symbols after the last child of the node to the end of the node. - */ - after?: string - - /** - * The space symbols before the node. It also stores `*` - * and `_` symbols before the declaration (IE hack). - */ - before?: string - - /** - * The symbols between the selector and `{` for rules. - */ - between?: string - - /** - * Contains `true` if there is semicolon after rule. 
- */ - ownSemicolon?: string - - /** - * The rule’s selector with comments. - */ - selector?: { - raw: string - value: string - } - - /** - * Contains `true` if the last child has an (optional) semicolon. - */ - semicolon?: boolean - } - - export type RuleProps = { - /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ - raws?: RuleRaws - } & ( - | { - /** Selector or selectors of the rule. */ - selector: string - selectors?: never - } - | { - selector?: never - /** Selectors of the rule represented as an array of strings. */ - selectors: readonly string[] - } - ) & ContainerProps - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Rule_ as default } -} - -/** - * Represents a CSS rule: a selector followed by a declaration block. - * - * ```js - * Once (root, { Rule }) { - * let a = new Rule({ selector: 'a' }) - * a.append(…) - * root.append(a) - * } - * ``` - * - * ```js - * const root = postcss.parse('a{}') - * const rule = root.first - * rule.type //=> 'rule' - * rule.toString() //=> 'a{}' - * ``` - */ -declare class Rule_ extends Container { - nodes: NonNullable - parent: ContainerWithChildren | undefined - raws: Rule.RuleRaws - type: 'rule' - constructor(defaults?: Rule.RuleProps) - - assign(overrides: object | Rule.RuleProps): this - clone(overrides?: Partial): this - - cloneAfter(overrides?: Partial): this - - cloneBefore(overrides?: Partial): this - /** - * The rule’s full selector represented as a string. - * - * ```js - * const root = postcss.parse('a, b { }') - * const rule = root.first - * rule.selector //=> 'a, b' - * ``` - */ - get selector(): string - set selector(value: string) - /** - * An array containing the rule’s individual selectors. - * Groups of selectors are split at commas. - * - * ```js - * const root = postcss.parse('a, b { }') - * const rule = root.first - * - * rule.selector //=> 'a, b' - * rule.selectors //=> ['a', 'b'] - * - * rule.selectors = ['a', 'strong'] - * rule.selector //=> 'a, strong' - * ``` - */ - get selectors(): string[] - set selectors(values: string[]) -} - -declare class Rule extends Rule_ {} - -export = Rule diff --git a/node_modules/postcss/lib/rule.js b/node_modules/postcss/lib/rule.js deleted file mode 100644 index a93ab25..0000000 --- a/node_modules/postcss/lib/rule.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict' - -let Container = require('./container') -let list = require('./list') - -class Rule extends Container { - constructor(defaults) { - super(defaults) - this.type = 'rule' - if (!this.nodes) this.nodes = [] - } - - get selectors() { - return list.comma(this.selector) - } - - set selectors(values) { - let match = this.selector ? this.selector.match(/,\s*/) : null - let sep = match ? 
match[0] : ',' + this.raw('between', 'beforeOpen') - this.selector = values.join(sep) - } -} - -module.exports = Rule -Rule.default = Rule - -Container.registerRule(Rule) diff --git a/node_modules/postcss/lib/stringifier.d.ts b/node_modules/postcss/lib/stringifier.d.ts deleted file mode 100644 index f707a6a..0000000 --- a/node_modules/postcss/lib/stringifier.d.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { - AnyNode, - AtRule, - Builder, - Comment, - Container, - Declaration, - Document, - Root, - Rule -} from './postcss.js' - -declare namespace Stringifier { - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Stringifier_ as default } -} - -declare class Stringifier_ { - builder: Builder - constructor(builder: Builder) - atrule(node: AtRule, semicolon?: boolean): void - beforeAfter(node: AnyNode, detect: 'after' | 'before'): string - block(node: AnyNode, start: string): void - body(node: Container): void - comment(node: Comment): void - decl(node: Declaration, semicolon?: boolean): void - document(node: Document): void - raw(node: AnyNode, own: null | string, detect?: string): string - rawBeforeClose(root: Root): string | undefined - rawBeforeComment(root: Root, node: Comment): string | undefined - rawBeforeDecl(root: Root, node: Declaration): string | undefined - rawBeforeOpen(root: Root): string | undefined - rawBeforeRule(root: Root): string | undefined - rawColon(root: Root): string | undefined - rawEmptyBody(root: Root): string | undefined - rawIndent(root: Root): string | undefined - rawSemicolon(root: Root): boolean | undefined - rawValue(node: AnyNode, prop: string): string - root(node: Root): void - rule(node: Rule): void - stringify(node: AnyNode, semicolon?: boolean): void -} - -declare class Stringifier extends Stringifier_ {} - -export = Stringifier diff --git a/node_modules/postcss/lib/stringifier.js b/node_modules/postcss/lib/stringifier.js deleted file mode 100644 index e07ad12..0000000 --- a/node_modules/postcss/lib/stringifier.js +++ /dev/null @@ -1,353 +0,0 @@ -'use strict' - -const DEFAULT_RAW = { - after: '\n', - beforeClose: '\n', - beforeComment: '\n', - beforeDecl: '\n', - beforeOpen: ' ', - beforeRule: '\n', - colon: ': ', - commentLeft: ' ', - commentRight: ' ', - emptyBody: '', - indent: ' ', - semicolon: false -} - -function capitalize(str) { - return str[0].toUpperCase() + str.slice(1) -} - -class Stringifier { - constructor(builder) { - this.builder = builder - } - - atrule(node, semicolon) { - let name = '@' + node.name - let params = node.params ? this.rawValue(node, 'params') : '' - - if (typeof node.raws.afterName !== 'undefined') { - name += node.raws.afterName - } else if (params) { - name += ' ' - } - - if (node.nodes) { - this.block(node, name + params) - } else { - let end = (node.raws.between || '') + (semicolon ? 
';' : '') - this.builder(name + params + end, node) - } - } - - beforeAfter(node, detect) { - let value - if (node.type === 'decl') { - value = this.raw(node, null, 'beforeDecl') - } else if (node.type === 'comment') { - value = this.raw(node, null, 'beforeComment') - } else if (detect === 'before') { - value = this.raw(node, null, 'beforeRule') - } else { - value = this.raw(node, null, 'beforeClose') - } - - let buf = node.parent - let depth = 0 - while (buf && buf.type !== 'root') { - depth += 1 - buf = buf.parent - } - - if (value.includes('\n')) { - let indent = this.raw(node, null, 'indent') - if (indent.length) { - for (let step = 0; step < depth; step++) value += indent - } - } - - return value - } - - block(node, start) { - let between = this.raw(node, 'between', 'beforeOpen') - this.builder(start + between + '{', node, 'start') - - let after - if (node.nodes && node.nodes.length) { - this.body(node) - after = this.raw(node, 'after') - } else { - after = this.raw(node, 'after', 'emptyBody') - } - - if (after) this.builder(after) - this.builder('}', node, 'end') - } - - body(node) { - let last = node.nodes.length - 1 - while (last > 0) { - if (node.nodes[last].type !== 'comment') break - last -= 1 - } - - let semicolon = this.raw(node, 'semicolon') - for (let i = 0; i < node.nodes.length; i++) { - let child = node.nodes[i] - let before = this.raw(child, 'before') - if (before) this.builder(before) - this.stringify(child, last !== i || semicolon) - } - } - - comment(node) { - let left = this.raw(node, 'left', 'commentLeft') - let right = this.raw(node, 'right', 'commentRight') - this.builder('/*' + left + node.text + right + '*/', node) - } - - decl(node, semicolon) { - let between = this.raw(node, 'between', 'colon') - let string = node.prop + between + this.rawValue(node, 'value') - - if (node.important) { - string += node.raws.important || ' !important' - } - - if (semicolon) string += ';' - this.builder(string, node) - } - - document(node) { - this.body(node) - } - - raw(node, own, detect) { - let value - if (!detect) detect = own - - // Already had - if (own) { - value = node.raws[own] - if (typeof value !== 'undefined') return value - } - - let parent = node.parent - - if (detect === 'before') { - // Hack for first rule in CSS - if (!parent || (parent.type === 'root' && parent.first === node)) { - return '' - } - - // `root` nodes in `document` should use only their own raws - if (parent && parent.type === 'document') { - return '' - } - } - - // Floating child without parent - if (!parent) return DEFAULT_RAW[detect] - - // Detect style by other nodes - let root = node.root() - if (!root.rawCache) root.rawCache = {} - if (typeof root.rawCache[detect] !== 'undefined') { - return root.rawCache[detect] - } - - if (detect === 'before' || detect === 'after') { - return this.beforeAfter(node, detect) - } else { - let method = 'raw' + capitalize(detect) - if (this[method]) { - value = this[method](root, node) - } else { - root.walk(i => { - value = i.raws[own] - if (typeof value !== 'undefined') return false - }) - } - } - - if (typeof value === 'undefined') value = DEFAULT_RAW[detect] - - root.rawCache[detect] = value - return value - } - - rawBeforeClose(root) { - let value - root.walk(i => { - if (i.nodes && i.nodes.length > 0) { - if (typeof i.raws.after !== 'undefined') { - value = i.raws.after - if (value.includes('\n')) { - value = value.replace(/[^\n]+$/, '') - } - return false - } - } - }) - if (value) value = value.replace(/\S/g, '') - return value - } - - 
rawBeforeComment(root, node) { - let value - root.walkComments(i => { - if (typeof i.raws.before !== 'undefined') { - value = i.raws.before - if (value.includes('\n')) { - value = value.replace(/[^\n]+$/, '') - } - return false - } - }) - if (typeof value === 'undefined') { - value = this.raw(node, null, 'beforeDecl') - } else if (value) { - value = value.replace(/\S/g, '') - } - return value - } - - rawBeforeDecl(root, node) { - let value - root.walkDecls(i => { - if (typeof i.raws.before !== 'undefined') { - value = i.raws.before - if (value.includes('\n')) { - value = value.replace(/[^\n]+$/, '') - } - return false - } - }) - if (typeof value === 'undefined') { - value = this.raw(node, null, 'beforeRule') - } else if (value) { - value = value.replace(/\S/g, '') - } - return value - } - - rawBeforeOpen(root) { - let value - root.walk(i => { - if (i.type !== 'decl') { - value = i.raws.between - if (typeof value !== 'undefined') return false - } - }) - return value - } - - rawBeforeRule(root) { - let value - root.walk(i => { - if (i.nodes && (i.parent !== root || root.first !== i)) { - if (typeof i.raws.before !== 'undefined') { - value = i.raws.before - if (value.includes('\n')) { - value = value.replace(/[^\n]+$/, '') - } - return false - } - } - }) - if (value) value = value.replace(/\S/g, '') - return value - } - - rawColon(root) { - let value - root.walkDecls(i => { - if (typeof i.raws.between !== 'undefined') { - value = i.raws.between.replace(/[^\s:]/g, '') - return false - } - }) - return value - } - - rawEmptyBody(root) { - let value - root.walk(i => { - if (i.nodes && i.nodes.length === 0) { - value = i.raws.after - if (typeof value !== 'undefined') return false - } - }) - return value - } - - rawIndent(root) { - if (root.raws.indent) return root.raws.indent - let value - root.walk(i => { - let p = i.parent - if (p && p !== root && p.parent && p.parent === root) { - if (typeof i.raws.before !== 'undefined') { - let parts = i.raws.before.split('\n') - value = parts[parts.length - 1] - value = value.replace(/\S/g, '') - return false - } - } - }) - return value - } - - rawSemicolon(root) { - let value - root.walk(i => { - if (i.nodes && i.nodes.length && i.last.type === 'decl') { - value = i.raws.semicolon - if (typeof value !== 'undefined') return false - } - }) - return value - } - - rawValue(node, prop) { - let value = node[prop] - let raw = node.raws[prop] - if (raw && raw.value === value) { - return raw.raw - } - - return value - } - - root(node) { - this.body(node) - if (node.raws.after) this.builder(node.raws.after) - } - - rule(node) { - this.block(node, this.rawValue(node, 'selector')) - if (node.raws.ownSemicolon) { - this.builder(node.raws.ownSemicolon, node, 'end') - } - } - - stringify(node, semicolon) { - /* c8 ignore start */ - if (!this[node.type]) { - throw new Error( - 'Unknown AST node type ' + - node.type + - '. ' + - 'Maybe you need to change PostCSS stringifier.' 
- ) - } - /* c8 ignore stop */ - this[node.type](node, semicolon) - } -} - -module.exports = Stringifier -Stringifier.default = Stringifier diff --git a/node_modules/postcss/lib/stringify.d.ts b/node_modules/postcss/lib/stringify.d.ts deleted file mode 100644 index 06ad0b4..0000000 --- a/node_modules/postcss/lib/stringify.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Stringifier } from './postcss.js' - -interface Stringify extends Stringifier { - default: Stringify -} - -declare const stringify: Stringify - -export = stringify diff --git a/node_modules/postcss/lib/stringify.js b/node_modules/postcss/lib/stringify.js deleted file mode 100644 index 77bd017..0000000 --- a/node_modules/postcss/lib/stringify.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' - -let Stringifier = require('./stringifier') - -function stringify(node, builder) { - let str = new Stringifier(builder) - str.stringify(node) -} - -module.exports = stringify -stringify.default = stringify diff --git a/node_modules/postcss/lib/symbols.js b/node_modules/postcss/lib/symbols.js deleted file mode 100644 index a142c26..0000000 --- a/node_modules/postcss/lib/symbols.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict' - -module.exports.isClean = Symbol('isClean') - -module.exports.my = Symbol('my') diff --git a/node_modules/postcss/lib/terminal-highlight.js b/node_modules/postcss/lib/terminal-highlight.js deleted file mode 100644 index 6196c9d..0000000 --- a/node_modules/postcss/lib/terminal-highlight.js +++ /dev/null @@ -1,70 +0,0 @@ -'use strict' - -let pico = require('picocolors') - -let tokenizer = require('./tokenize') - -let Input - -function registerInput(dependant) { - Input = dependant -} - -const HIGHLIGHT_THEME = { - ';': pico.yellow, - ':': pico.yellow, - '(': pico.cyan, - ')': pico.cyan, - '[': pico.yellow, - ']': pico.yellow, - '{': pico.yellow, - '}': pico.yellow, - 'at-word': pico.cyan, - 'brackets': pico.cyan, - 'call': pico.cyan, - 'class': pico.yellow, - 'comment': pico.gray, - 'hash': pico.magenta, - 'string': pico.green -} - -function getTokenType([type, value], processor) { - if (type === 'word') { - if (value[0] === '.') { - return 'class' - } - if (value[0] === '#') { - return 'hash' - } - } - - if (!processor.endOfFile()) { - let next = processor.nextToken() - processor.back(next) - if (next[0] === 'brackets' || next[0] === '(') return 'call' - } - - return type -} - -function terminalHighlight(css) { - let processor = tokenizer(new Input(css), { ignoreErrors: true }) - let result = '' - while (!processor.endOfFile()) { - let token = processor.nextToken() - let color = HIGHLIGHT_THEME[getTokenType(token, processor)] - if (color) { - result += token[1] - .split(/\r?\n/) - .map(i => color(i)) - .join('\n') - } else { - result += token[1] - } - } - return result -} - -terminalHighlight.registerInput = registerInput - -module.exports = terminalHighlight diff --git a/node_modules/postcss/lib/tokenize.js b/node_modules/postcss/lib/tokenize.js deleted file mode 100644 index 1d41284..0000000 --- a/node_modules/postcss/lib/tokenize.js +++ /dev/null @@ -1,266 +0,0 @@ -'use strict' - -const SINGLE_QUOTE = "'".charCodeAt(0) -const DOUBLE_QUOTE = '"'.charCodeAt(0) -const BACKSLASH = '\\'.charCodeAt(0) -const SLASH = '/'.charCodeAt(0) -const NEWLINE = '\n'.charCodeAt(0) -const SPACE = ' '.charCodeAt(0) -const FEED = '\f'.charCodeAt(0) -const TAB = '\t'.charCodeAt(0) -const CR = '\r'.charCodeAt(0) -const OPEN_SQUARE = '['.charCodeAt(0) -const CLOSE_SQUARE = ']'.charCodeAt(0) -const OPEN_PARENTHESES = '('.charCodeAt(0) -const 
CLOSE_PARENTHESES = ')'.charCodeAt(0) -const OPEN_CURLY = '{'.charCodeAt(0) -const CLOSE_CURLY = '}'.charCodeAt(0) -const SEMICOLON = ';'.charCodeAt(0) -const ASTERISK = '*'.charCodeAt(0) -const COLON = ':'.charCodeAt(0) -const AT = '@'.charCodeAt(0) - -const RE_AT_END = /[\t\n\f\r "#'()/;[\\\]{}]/g -const RE_WORD_END = /[\t\n\f\r !"#'():;@[\\\]{}]|\/(?=\*)/g -const RE_BAD_BRACKET = /.[\r\n"'(/\\]/ -const RE_HEX_ESCAPE = /[\da-f]/i - -module.exports = function tokenizer(input, options = {}) { - let css = input.css.valueOf() - let ignore = options.ignoreErrors - - let code, content, escape, next, quote - let currentToken, escaped, escapePos, n, prev - - let length = css.length - let pos = 0 - let buffer = [] - let returned = [] - - function position() { - return pos - } - - function unclosed(what) { - throw input.error('Unclosed ' + what, pos) - } - - function endOfFile() { - return returned.length === 0 && pos >= length - } - - function nextToken(opts) { - if (returned.length) return returned.pop() - if (pos >= length) return - - let ignoreUnclosed = opts ? opts.ignoreUnclosed : false - - code = css.charCodeAt(pos) - - switch (code) { - case NEWLINE: - case SPACE: - case TAB: - case CR: - case FEED: { - next = pos - do { - next += 1 - code = css.charCodeAt(next) - } while ( - code === SPACE || - code === NEWLINE || - code === TAB || - code === CR || - code === FEED - ) - - currentToken = ['space', css.slice(pos, next)] - pos = next - 1 - break - } - - case OPEN_SQUARE: - case CLOSE_SQUARE: - case OPEN_CURLY: - case CLOSE_CURLY: - case COLON: - case SEMICOLON: - case CLOSE_PARENTHESES: { - let controlChar = String.fromCharCode(code) - currentToken = [controlChar, controlChar, pos] - break - } - - case OPEN_PARENTHESES: { - prev = buffer.length ? buffer.pop()[1] : '' - n = css.charCodeAt(pos + 1) - if ( - prev === 'url' && - n !== SINGLE_QUOTE && - n !== DOUBLE_QUOTE && - n !== SPACE && - n !== NEWLINE && - n !== TAB && - n !== FEED && - n !== CR - ) { - next = pos - do { - escaped = false - next = css.indexOf(')', next + 1) - if (next === -1) { - if (ignore || ignoreUnclosed) { - next = pos - break - } else { - unclosed('bracket') - } - } - escapePos = next - while (css.charCodeAt(escapePos - 1) === BACKSLASH) { - escapePos -= 1 - escaped = !escaped - } - } while (escaped) - - currentToken = ['brackets', css.slice(pos, next + 1), pos, next] - - pos = next - } else { - next = css.indexOf(')', pos + 1) - content = css.slice(pos, next + 1) - - if (next === -1 || RE_BAD_BRACKET.test(content)) { - currentToken = ['(', '(', pos] - } else { - currentToken = ['brackets', content, pos, next] - pos = next - } - } - - break - } - - case SINGLE_QUOTE: - case DOUBLE_QUOTE: { - quote = code === SINGLE_QUOTE ? 
"'" : '"' - next = pos - do { - escaped = false - next = css.indexOf(quote, next + 1) - if (next === -1) { - if (ignore || ignoreUnclosed) { - next = pos + 1 - break - } else { - unclosed('string') - } - } - escapePos = next - while (css.charCodeAt(escapePos - 1) === BACKSLASH) { - escapePos -= 1 - escaped = !escaped - } - } while (escaped) - - currentToken = ['string', css.slice(pos, next + 1), pos, next] - pos = next - break - } - - case AT: { - RE_AT_END.lastIndex = pos + 1 - RE_AT_END.test(css) - if (RE_AT_END.lastIndex === 0) { - next = css.length - 1 - } else { - next = RE_AT_END.lastIndex - 2 - } - - currentToken = ['at-word', css.slice(pos, next + 1), pos, next] - - pos = next - break - } - - case BACKSLASH: { - next = pos - escape = true - while (css.charCodeAt(next + 1) === BACKSLASH) { - next += 1 - escape = !escape - } - code = css.charCodeAt(next + 1) - if ( - escape && - code !== SLASH && - code !== SPACE && - code !== NEWLINE && - code !== TAB && - code !== CR && - code !== FEED - ) { - next += 1 - if (RE_HEX_ESCAPE.test(css.charAt(next))) { - while (RE_HEX_ESCAPE.test(css.charAt(next + 1))) { - next += 1 - } - if (css.charCodeAt(next + 1) === SPACE) { - next += 1 - } - } - } - - currentToken = ['word', css.slice(pos, next + 1), pos, next] - - pos = next - break - } - - default: { - if (code === SLASH && css.charCodeAt(pos + 1) === ASTERISK) { - next = css.indexOf('*/', pos + 2) + 1 - if (next === 0) { - if (ignore || ignoreUnclosed) { - next = css.length - } else { - unclosed('comment') - } - } - - currentToken = ['comment', css.slice(pos, next + 1), pos, next] - pos = next - } else { - RE_WORD_END.lastIndex = pos + 1 - RE_WORD_END.test(css) - if (RE_WORD_END.lastIndex === 0) { - next = css.length - 1 - } else { - next = RE_WORD_END.lastIndex - 2 - } - - currentToken = ['word', css.slice(pos, next + 1), pos, next] - buffer.push(currentToken) - pos = next - } - - break - } - } - - pos++ - return currentToken - } - - function back(token) { - returned.push(token) - } - - return { - back, - endOfFile, - nextToken, - position - } -} diff --git a/node_modules/postcss/lib/warn-once.js b/node_modules/postcss/lib/warn-once.js deleted file mode 100644 index 316e1cf..0000000 --- a/node_modules/postcss/lib/warn-once.js +++ /dev/null @@ -1,13 +0,0 @@ -/* eslint-disable no-console */ -'use strict' - -let printed = {} - -module.exports = function warnOnce(message) { - if (printed[message]) return - printed[message] = true - - if (typeof console !== 'undefined' && console.warn) { - console.warn(message) - } -} diff --git a/node_modules/postcss/lib/warning.d.ts b/node_modules/postcss/lib/warning.d.ts deleted file mode 100644 index b25bba8..0000000 --- a/node_modules/postcss/lib/warning.d.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { RangePosition } from './css-syntax-error.js' -import Node from './node.js' - -declare namespace Warning { - export interface WarningOptions { - /** - * End position, exclusive, in CSS node string that caused the warning. - */ - end?: RangePosition - - /** - * End index, exclusive, in CSS node string that caused the warning. - */ - endIndex?: number - - /** - * Start index, inclusive, in CSS node string that caused the warning. - */ - index?: number - - /** - * CSS node that caused the warning. - */ - node?: Node - - /** - * Name of the plugin that created this warning. `Result#warn` fills - * this property automatically. - */ - plugin?: string - - /** - * Start position, inclusive, in CSS node string that caused the warning. 
- */ - start?: RangePosition - - /** - * Word in CSS source that caused the warning. - */ - word?: string - } - - // eslint-disable-next-line @typescript-eslint/no-use-before-define - export { Warning_ as default } -} - -/** - * Represents a plugin’s warning. It can be created using `Node#warn`. - * - * ```js - * if (decl.important) { - * decl.warn(result, 'Avoid !important', { word: '!important' }) - * } - * ``` - */ -declare class Warning_ { - /** - * Column for inclusive start position in the input file with this warning’s source. - * - * ```js - * warning.column //=> 6 - * ``` - */ - column: number - - /** - * Column for exclusive end position in the input file with this warning’s source. - * - * ```js - * warning.endColumn //=> 4 - * ``` - */ - endColumn?: number - - /** - * Line for exclusive end position in the input file with this warning’s source. - * - * ```js - * warning.endLine //=> 6 - * ``` - */ - endLine?: number - - /** - * Line for inclusive start position in the input file with this warning’s source. - * - * ```js - * warning.line //=> 5 - * ``` - */ - line: number - - /** - * Contains the CSS node that caused the warning. - * - * ```js - * warning.node.toString() //=> 'color: white !important' - * ``` - */ - node: Node - - /** - * The name of the plugin that created this warning. - * When you call `Node#warn` it will fill this property automatically. - * - * ```js - * warning.plugin //=> 'postcss-important' - * ``` - */ - plugin: string - - /** - * The warning message. - * - * ```js - * warning.text //=> 'Try to avoid !important' - * ``` - */ - text: string - - /** - * Type to filter warnings from `Result#messages`. - * Always equal to `"warning"`. - */ - type: 'warning' - - /** - * @param text Warning message. - * @param opts Warning options. - */ - constructor(text: string, opts?: Warning.WarningOptions) - - /** - * Returns a warning position and message. - * - * ```js - * warning.toString() //=> 'postcss-lint:a.css:10:14: Avoid !important' - * ``` - * - * @return Warning position and message. 
- */ - toString(): string -} - -declare class Warning extends Warning_ {} - -export = Warning diff --git a/node_modules/postcss/lib/warning.js b/node_modules/postcss/lib/warning.js deleted file mode 100644 index 3a3d79c..0000000 --- a/node_modules/postcss/lib/warning.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict' - -class Warning { - constructor(text, opts = {}) { - this.type = 'warning' - this.text = text - - if (opts.node && opts.node.source) { - let range = opts.node.rangeBy(opts) - this.line = range.start.line - this.column = range.start.column - this.endLine = range.end.line - this.endColumn = range.end.column - } - - for (let opt in opts) this[opt] = opts[opt] - } - - toString() { - if (this.node) { - return this.node.error(this.text, { - index: this.index, - plugin: this.plugin, - word: this.word - }).message - } - - if (this.plugin) { - return this.plugin + ': ' + this.text - } - - return this.text - } -} - -module.exports = Warning -Warning.default = Warning diff --git a/node_modules/postcss/package.json b/node_modules/postcss/package.json deleted file mode 100755 index 4890c36..0000000 --- a/node_modules/postcss/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "name": "postcss", - "version": "8.4.49", - "description": "Tool for transforming styles with JS plugins", - "engines": { - "node": "^10 || ^12 || >=14" - }, - "exports": { - ".": { - "require": "./lib/postcss.js", - "import": "./lib/postcss.mjs" - }, - "./lib/at-rule": "./lib/at-rule.js", - "./lib/comment": "./lib/comment.js", - "./lib/container": "./lib/container.js", - "./lib/css-syntax-error": "./lib/css-syntax-error.js", - "./lib/declaration": "./lib/declaration.js", - "./lib/fromJSON": "./lib/fromJSON.js", - "./lib/input": "./lib/input.js", - "./lib/lazy-result": "./lib/lazy-result.js", - "./lib/no-work-result": "./lib/no-work-result.js", - "./lib/list": "./lib/list.js", - "./lib/map-generator": "./lib/map-generator.js", - "./lib/node": "./lib/node.js", - "./lib/parse": "./lib/parse.js", - "./lib/parser": "./lib/parser.js", - "./lib/postcss": "./lib/postcss.js", - "./lib/previous-map": "./lib/previous-map.js", - "./lib/processor": "./lib/processor.js", - "./lib/result": "./lib/result.js", - "./lib/root": "./lib/root.js", - "./lib/rule": "./lib/rule.js", - "./lib/stringifier": "./lib/stringifier.js", - "./lib/stringify": "./lib/stringify.js", - "./lib/symbols": "./lib/symbols.js", - "./lib/terminal-highlight": "./lib/terminal-highlight.js", - "./lib/tokenize": "./lib/tokenize.js", - "./lib/warn-once": "./lib/warn-once.js", - "./lib/warning": "./lib/warning.js", - "./package.json": "./package.json" - }, - "main": "./lib/postcss.js", - "types": "./lib/postcss.d.ts", - "keywords": [ - "css", - "postcss", - "rework", - "preprocessor", - "parser", - "source map", - "transform", - "manipulation", - "transpiler" - ], - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "author": "Andrey Sitnik ", - "license": "MIT", - "homepage": "https://postcss.org/", - "repository": "postcss/postcss", - "bugs": { - "url": "https://github.com/postcss/postcss/issues" - }, - "dependencies": { - "nanoid": "^3.3.7", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "browser": { - "./lib/terminal-highlight": false, - "source-map-js": false, - "path": false, - "url": false, - "fs": false - } -} diff --git 
a/node_modules/pretty-hrtime/.jshintignore b/node_modules/pretty-hrtime/.jshintignore deleted file mode 100644 index cb28eb3..0000000 --- a/node_modules/pretty-hrtime/.jshintignore +++ /dev/null @@ -1 +0,0 @@ -node_modules/** diff --git a/node_modules/pretty-hrtime/.npmignore b/node_modules/pretty-hrtime/.npmignore deleted file mode 100644 index 094a5f3..0000000 --- a/node_modules/pretty-hrtime/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -.DS_Store -*.log -node_modules -build -*.node -components -*.orig -.idea -test -.travis.yml diff --git a/node_modules/pretty-hrtime/LICENSE b/node_modules/pretty-hrtime/LICENSE deleted file mode 100644 index b7346ab..0000000 --- a/node_modules/pretty-hrtime/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2013 [Richardson & Sons, LLC](http://richardsonandsons.com/) - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pretty-hrtime/README.md b/node_modules/pretty-hrtime/README.md deleted file mode 100644 index f4be28d..0000000 --- a/node_modules/pretty-hrtime/README.md +++ /dev/null @@ -1,57 +0,0 @@ -[![Build Status](https://secure.travis-ci.org/robrich/pretty-hrtime.png?branch=master)](https://travis-ci.org/robrich/pretty-hrtime) -[![Dependency Status](https://david-dm.org/robrich/pretty-hrtime.png)](https://david-dm.org/robrich/pretty-hrtime) - -pretty-hrtime -============ - -[process.hrtime()](http://nodejs.org/api/process.html#process_process_hrtime) to words - -Usage ------ - -```javascript -var prettyHrtime = require('pretty-hrtime'); - -var start = process.hrtime(); -// do stuff -var end = process.hrtime(start); - -var words = prettyHrtime(end); -console.log(words); // '1.2 ms' - -words = prettyHrtime(end, {verbose:true}); -console.log(words); // '1 millisecond 209 microseconds' - -words = prettyHrtime(end, {precise:true}); -console.log(words); // '1.20958 ms' -``` - -Note: process.hrtime() has been available since 0.7.6. -See [http://nodejs.org/changelog.html](http://nodejs.org/changelog.html) -and [https://github.com/joyent/node/commit/f06abd](https://github.com/joyent/node/commit/f06abd). 
- -LICENSE -------- - -(MIT License) - -Copyright (c) 2013 [Richardson & Sons, LLC](http://richardsonandsons.com/) - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pretty-hrtime/index.js b/node_modules/pretty-hrtime/index.js deleted file mode 100644 index bed3f89..0000000 --- a/node_modules/pretty-hrtime/index.js +++ /dev/null @@ -1,80 +0,0 @@ -/*jshint node:true */ - -"use strict"; - -var minimalDesc = ['h', 'min', 's', 'ms', 'μs', 'ns']; -var verboseDesc = ['hour', 'minute', 'second', 'millisecond', 'microsecond', 'nanosecond']; -var convert = [60*60, 60, 1, 1e6, 1e3, 1]; - -module.exports = function (source, opts) { - var verbose, precise, i, spot, sourceAtStep, valAtStep, decimals, strAtStep, results, totalSeconds; - - verbose = false; - precise = false; - if (opts) { - verbose = opts.verbose || false; - precise = opts.precise || false; - } - - if (!Array.isArray(source) || source.length !== 2) { - return ''; - } - if (typeof source[0] !== 'number' || typeof source[1] !== 'number') { - return ''; - } - - // normalize source array due to changes in node v5.4+ - if (source[1] < 0) { - totalSeconds = source[0] + source[1] / 1e9; - source[0] = parseInt(totalSeconds); - source[1] = parseFloat((totalSeconds % 1).toPrecision(9)) * 1e9; - } - - results = ''; - - // foreach unit - for (i = 0; i < 6; i++) { - spot = i < 3 ? 0 : 1; // grabbing first or second spot in source array - sourceAtStep = source[spot]; - if (i !== 3 && i !== 0) { - sourceAtStep = sourceAtStep % convert[i-1]; // trim off previous portions - } - if (i === 2) { - sourceAtStep += source[1]/1e9; // get partial seconds from other portion of the array - } - valAtStep = sourceAtStep / convert[i]; // val at this unit - if (valAtStep >= 1) { - if (verbose) { - valAtStep = Math.floor(valAtStep); // deal in whole units, subsequent laps will get the decimal portion - } - if (!precise) { - // don't fling too many decimals - decimals = valAtStep >= 10 ? 
0 : 2; - strAtStep = valAtStep.toFixed(decimals); - } else { - strAtStep = valAtStep.toString(); - } - if (strAtStep.indexOf('.') > -1 && strAtStep[strAtStep.length-1] === '0') { - strAtStep = strAtStep.replace(/\.?0+$/,''); // remove trailing zeros - } - if (results) { - results += ' '; // append space if we have a previous value - } - results += strAtStep; // append the value - // append units - if (verbose) { - results += ' '+verboseDesc[i]; - if (strAtStep !== '1') { - results += 's'; - } - } else { - results += ' '+minimalDesc[i]; - } - if (!verbose) { - break; // verbose gets as many groups as necessary, the rest get only one - } - } - } - - return results; -}; diff --git a/node_modules/pretty-hrtime/package.json b/node_modules/pretty-hrtime/package.json deleted file mode 100644 index e4a7985..0000000 --- a/node_modules/pretty-hrtime/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "pretty-hrtime", - "description": "process.hrtime() to words", - "version": "1.0.3", - "homepage": "https://github.com/robrich/pretty-hrtime", - "repository": "git://github.com/robrich/pretty-hrtime.git", - "author": "Rob Richardson (http://robrich.org/)", - "main": "./index.js", - "keywords": [ - "hrtime", - "benchmark" - ], - "devDependencies": { - "jshint": "^2.9.4", - "mocha": "^3.1.2", - "should": "^11.1.1" - }, - "scripts": { - "test": "mocha && jshint ." - }, - "engines": { - "node": ">= 0.8" - }, - "license": "MIT" -} diff --git a/node_modules/queue-microtask/LICENSE b/node_modules/queue-microtask/LICENSE deleted file mode 100755 index c7e6852..0000000 --- a/node_modules/queue-microtask/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/queue-microtask/README.md b/node_modules/queue-microtask/README.md deleted file mode 100644 index 0be05a6..0000000 --- a/node_modules/queue-microtask/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# queue-microtask [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[ci-image]: https://img.shields.io/github/workflow/status/feross/queue-microtask/ci/master -[ci-url]: https://github.com/feross/queue-microtask/actions -[npm-image]: https://img.shields.io/npm/v/queue-microtask.svg -[npm-url]: https://npmjs.org/package/queue-microtask -[downloads-image]: https://img.shields.io/npm/dm/queue-microtask.svg -[downloads-url]: https://npmjs.org/package/queue-microtask -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -### fast, tiny [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) shim for modern engines - -- Use [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) in all modern JS engines. -- No dependencies. Less than 10 lines. No shims or complicated fallbacks. -- Optimal performance in all modern environments - - Uses `queueMicrotask` in modern environments - - Fallback to `Promise.resolve().then(fn)` in Node.js 10 and earlier, and old browsers (same performance as `queueMicrotask`) - -## install - -``` -npm install queue-microtask -``` - -## usage - -```js -const queueMicrotask = require('queue-microtask') - -queueMicrotask(() => { /* this will run soon */ }) -``` - -## What is `queueMicrotask` and why would one use it? - -The `queueMicrotask` function is a WHATWG standard. It queues a microtask to be executed prior to control returning to the event loop. - -A microtask is a short function which will run after the current task has completed its work and when there is no other code waiting to be run before control of the execution context is returned to the event loop. - -The code `queueMicrotask(fn)` is equivalent to the code `Promise.resolve().then(fn)`. It is also very similar to [`process.nextTick(fn)`](https://nodejs.org/api/process.html#process_process_nexttick_callback_args) in Node. - -Using microtasks lets code run without interfering with any other, potentially higher priority, code that is pending, but before the JS engine regains control over the execution context. - -See the [spec](https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#microtask-queuing) or [Node documentation](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback) for more information. - -## Who is this package for? - -This package allows you to use `queueMicrotask` safely in all modern JS engines. Use it if you prioritize small JS bundle size over support for old browsers. - -If you just need to support Node 12 and later, use `queueMicrotask` directly. If you need to support all versions of Node, use this package. - -## Why not use `process.nextTick`? - -In Node, `queueMicrotask` and `process.nextTick` are [essentially equivalent](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback), though there are [subtle differences](https://github.com/YuzuJS/setImmediate#macrotasks-and-microtasks) that don't matter in most situations. - -You can think of `queueMicrotask` as a standardized version of `process.nextTick` that works in the browser. 
No need to rely on your browser bundler to shim `process` for the browser environment. - -## Why not use `setTimeout(fn, 0)`? - -This approach is the most compatible, but it has problems. Modern browsers throttle timers severely, so `setTimeout(…, 0)` usually takes at least 4ms to run. Furthermore, the throttling gets even worse if the page is backgrounded. If you have many `setTimeout` calls, then this can severely limit the performance of your program. - -## Why not use a microtask library like [`immediate`](https://www.npmjs.com/package/immediate) or [`asap`](https://www.npmjs.com/package/asap)? - -These packages are great! However, if you prioritize small JS bundle size over optimal performance in old browsers then you may want to consider this package. - -This package (`queue-microtask`) is four times smaller than `immediate`, twice as small as `asap`, and twice as small as using `process.nextTick` and letting the browser bundler shim it automatically. - -Note: This package throws an exception in JS environments which lack `Promise` support -- which are usually very old browsers and Node.js versions. - -Since the `queueMicrotask` API is supported in Node.js, Chrome, Firefox, Safari, Opera, and Edge, **the vast majority of users will get optimal performance**. Any JS environment with `Promise`, which is almost all of them, also get optimal performance. If you need support for JS environments which lack `Promise` support, use one of the alternative packages. - -## What is a shim? - -> In computer programming, a shim is a library that transparently intercepts API calls and changes the arguments passed, handles the operation itself or redirects the operation elsewhere. – [Wikipedia](https://en.wikipedia.org/wiki/Shim_(computing)) - -This package could also be described as a "ponyfill". - -> A ponyfill is almost the same as a polyfill, but not quite. Instead of patching functionality for older browsers, a ponyfill provides that functionality as a standalone module you can use. – [PonyFoo](https://ponyfoo.com/articles/polyfills-or-ponyfills) - -## API - -### `queueMicrotask(fn)` - -The `queueMicrotask()` method queues a microtask. - -The `fn` argument is a function to be executed after all pending tasks have completed but before yielding control to the browser's event loop. - -## license - -MIT. Copyright (c) [Feross Aboukhadijeh](https://feross.org). diff --git a/node_modules/queue-microtask/index.d.ts b/node_modules/queue-microtask/index.d.ts deleted file mode 100644 index b6a8646..0000000 --- a/node_modules/queue-microtask/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -declare const queueMicrotask: (cb: () => void) => void -export = queueMicrotask diff --git a/node_modules/queue-microtask/index.js b/node_modules/queue-microtask/index.js deleted file mode 100644 index 5560534..0000000 --- a/node_modules/queue-microtask/index.js +++ /dev/null @@ -1,9 +0,0 @@ -/*! queue-microtask. MIT License. Feross Aboukhadijeh */ -let promise - -module.exports = typeof queueMicrotask === 'function' - ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) - // reuse resolved promise, and allocate it lazily - : cb => (promise || (promise = Promise.resolve())) - .then(cb) - .catch(err => setTimeout(() => { throw err }, 0)) diff --git a/node_modules/queue-microtask/package.json b/node_modules/queue-microtask/package.json deleted file mode 100644 index d29a401..0000000 --- a/node_modules/queue-microtask/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "queue-microtask", - "description": "fast, tiny `queueMicrotask` shim for modern engines", - "version": "1.2.3", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/queue-microtask/issues" - }, - "devDependencies": { - "standard": "*", - "tape": "^5.2.2" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "homepage": "https://github.com/feross/queue-microtask", - "keywords": [ - "asap", - "immediate", - "micro task", - "microtask", - "nextTick", - "process.nextTick", - "queue micro task", - "queue microtask", - "queue-microtask", - "queueMicrotask", - "setImmediate", - "task" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/feross/queue-microtask.git" - }, - "scripts": { - "test": "standard && tape test/*.js" - } -} diff --git a/node_modules/read-cache/LICENSE b/node_modules/read-cache/LICENSE deleted file mode 100644 index 4b98a41..0000000 --- a/node_modules/read-cache/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright 2016 Bogdan Chadkin - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/read-cache/README.md b/node_modules/read-cache/README.md deleted file mode 100644 index 16a5c36..0000000 --- a/node_modules/read-cache/README.md +++ /dev/null @@ -1,46 +0,0 @@ -# read-cache [![Build Status](https://travis-ci.org/TrySound/read-cache.svg?branch=master)](https://travis-ci.org/TrySound/read-cache) - -Reads and caches the entire contents of a file until it is modified. - - -## Install - -``` -$ npm i read-cache -``` - - -## Usage - -```js -// foo.js -var readCache = require('read-cache'); - -readCache('foo.js').then(function (contents) { - console.log(contents); -}); -``` - - -## API - -### readCache(path[, encoding]) - -Returns a promise that resolves with the file's contents. 
- -### readCache.sync(path[, encoding]) - -Returns the content of the file. - -### readCache.get(path[, encoding]) - -Returns the content of cached file or null. - -### readCache.clear() - -Clears the contents of the cache. - - -## License - -MIT © [Bogdan Chadkin](mailto:trysound@yandex.ru) diff --git a/node_modules/read-cache/index.js b/node_modules/read-cache/index.js deleted file mode 100644 index b5263e6..0000000 --- a/node_modules/read-cache/index.js +++ /dev/null @@ -1,78 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var pify = require('pify'); - -var stat = pify(fs.stat); -var readFile = pify(fs.readFile); -var resolve = path.resolve; - -var cache = Object.create(null); - -function convert(content, encoding) { - if (Buffer.isEncoding(encoding)) { - return content.toString(encoding); - } - return content; -} - -module.exports = function (path, encoding) { - path = resolve(path); - - return stat(path).then(function (stats) { - var item = cache[path]; - - if (item && item.mtime.getTime() === stats.mtime.getTime()) { - return convert(item.content, encoding); - } - - return readFile(path).then(function (data) { - cache[path] = { - mtime: stats.mtime, - content: data - }; - - return convert(data, encoding); - }); - }).catch(function (err) { - cache[path] = null; - return Promise.reject(err); - }); -}; - -module.exports.sync = function (path, encoding) { - path = resolve(path); - - try { - var stats = fs.statSync(path); - var item = cache[path]; - - if (item && item.mtime.getTime() === stats.mtime.getTime()) { - return convert(item.content, encoding); - } - - var data = fs.readFileSync(path); - - cache[path] = { - mtime: stats.mtime, - content: data - }; - - return convert(data, encoding); - } catch (err) { - cache[path] = null; - throw err; - } - -}; - -module.exports.get = function (path, encoding) { - path = resolve(path); - if (cache[path]) { - return convert(cache[path].content, encoding); - } - return null; -}; - -module.exports.clear = function () { - cache = Object.create(null); -}; diff --git a/node_modules/read-cache/package.json b/node_modules/read-cache/package.json deleted file mode 100644 index 87199b0..0000000 --- a/node_modules/read-cache/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "read-cache", - "version": "1.0.0", - "description": "Reads and caches the entire contents of a file until it is modified", - "files": [ - "index.js" - ], - "main": "index.js", - "scripts": { - "test": "ava" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/TrySound/read-cache.git" - }, - "keywords": [ - "fs", - "read", - "cache" - ], - "author": "Bogdan Chadkin ", - "license": "MIT", - "bugs": { - "url": "https://github.com/TrySound/read-cache/issues" - }, - "homepage": "https://github.com/TrySound/read-cache#readme", - "devDependencies": { - "ava": "^0.9.1", - "del": "^2.2.0" - }, - "dependencies": { - "pify": "^2.3.0" - } -} diff --git a/node_modules/readdirp/LICENSE b/node_modules/readdirp/LICENSE deleted file mode 100644 index 037cbb4..0000000 --- a/node_modules/readdirp/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to 
permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/readdirp/README.md b/node_modules/readdirp/README.md deleted file mode 100644 index 465593c..0000000 --- a/node_modules/readdirp/README.md +++ /dev/null @@ -1,122 +0,0 @@ -# readdirp [![Weekly downloads](https://img.shields.io/npm/dw/readdirp.svg)](https://github.com/paulmillr/readdirp) - -Recursive version of [fs.readdir](https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback). Exposes a **stream API** and a **promise API**. - - -```sh -npm install readdirp -``` - -```javascript -const readdirp = require('readdirp'); - -// Use streams to achieve small RAM & CPU footprint. -// 1) Streams example with for-await. -for await (const entry of readdirp('.')) { - const {path} = entry; - console.log(`${JSON.stringify({path})}`); -} - -// 2) Streams example, non for-await. -// Print out all JS files along with their size within the current folder & subfolders. -readdirp('.', {fileFilter: '*.js', alwaysStat: true}) - .on('data', (entry) => { - const {path, stats: {size}} = entry; - console.log(`${JSON.stringify({path, size})}`); - }) - // Optionally call stream.destroy() in `warn()` in order to abort and cause 'close' to be emitted - .on('warn', error => console.error('non-fatal error', error)) - .on('error', error => console.error('fatal error', error)) - .on('end', () => console.log('done')); - -// 3) Promise example. More RAM and CPU than streams / for-await. -const files = await readdirp.promise('.'); -console.log(files.map(file => file.path)); - -// Other options. -readdirp('test', { - fileFilter: '*.js', - directoryFilter: ['!.git', '!*modules'] - // directoryFilter: (di) => di.basename.length === 9 - type: 'files_directories', - depth: 1 -}); -``` - -For more examples, check out `examples` directory. - -## API - -`const stream = readdirp(root[, options])` — **Stream API** - -- Reads given root recursively and returns a `stream` of [entry infos](#entryinfo) -- Optionally can be used like `for await (const entry of stream)` with node.js 10+ (`asyncIterator`). -- `on('data', (entry) => {})` [entry info](#entryinfo) for every file / dir. -- `on('warn', (error) => {})` non-fatal `Error` that prevents a file / dir from being processed. Example: inaccessible to the user. -- `on('error', (error) => {})` fatal `Error` which also ends the stream. Example: illegal options where passed. -- `on('end')` — we are done. Called when all entries were found and no more will be emitted. -- `on('close')` — stream is destroyed via `stream.destroy()`. - Could be useful if you want to manually abort even on a non fatal error. 
- At that point the stream is no longer `readable` and no more entries, warning or errors are emitted -- To learn more about streams, consult the very detailed [nodejs streams documentation](https://nodejs.org/api/stream.html) - or the [stream-handbook](https://github.com/substack/stream-handbook) - -`const entries = await readdirp.promise(root[, options])` — **Promise API**. Returns a list of [entry infos](#entryinfo). - -First argument is awalys `root`, path in which to start reading and recursing into subdirectories. - -### options - -- `fileFilter: ["*.js"]`: filter to include or exclude files. A `Function`, Glob string or Array of glob strings. - - **Function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry - - **Glob string**: a string (e.g., `*.js`) which is matched using [picomatch](https://github.com/micromatch/picomatch), so go there for more - information. Globstars (`**`) are not supported since specifying a recursive pattern for an already recursive function doesn't make sense. Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files. - - **Array of glob strings**: either need to be all inclusive or all exclusive (negated) patterns otherwise an error is thrown. - `['*.json', '*.js']` includes all JavaScript and Json files. - `['!.git', '!node_modules']` includes all directories except the '.git' and 'node_modules'. - - Directories that do not pass a filter will not be recursed into. -- `directoryFilter: ['!.git']`: filter to include/exclude directories found and to recurse into. Directories that do not pass a filter will not be recursed into. -- `depth: 5`: depth at which to stop recursing even if more subdirectories are found -- `type: 'files'`: determines if data events on the stream should be emitted for `'files'` (default), `'directories'`, `'files_directories'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes. -- `alwaysStat: false`: always return `stats` property for every file. Default is `false`, readdirp will return `Dirent` entries. Setting it to `true` can double readdir execution time - use it only when you need file `size`, `mtime` etc. Cannot be enabled on node <10.10.0. -- `lstat: false`: include symlink entries in the stream along with files. When `true`, `fs.lstat` would be used instead of `fs.stat` - -### `EntryInfo` - -Has the following properties: - -- `path: 'assets/javascripts/react.js'`: path to the file/directory (relative to given root) -- `fullPath: '/Users/dev/projects/app/assets/javascripts/react.js'`: full path to the file/directory found -- `basename: 'react.js'`: name of the file/directory -- `dirent: fs.Dirent`: built-in [dir entry object](https://nodejs.org/api/fs.html#fs_class_fs_dirent) - only with `alwaysStat: false` -- `stats: fs.Stats`: built in [stat object](https://nodejs.org/api/fs.html#fs_class_fs_stats) - only with `alwaysStat: true` - -## Changelog - -- 3.5 (Oct 13, 2020) disallows recursive directory-based symlinks. - Before, it could have entered infinite loop. -- 3.4 (Mar 19, 2020) adds support for directory-based symlinks. -- 3.3 (Dec 6, 2019) stabilizes RAM consumption and enables perf management with `highWaterMark` option. Fixes race conditions related to `for-await` looping. -- 3.2 (Oct 14, 2019) improves performance by 250% and makes streams implementation more idiomatic. 
-- 3.1 (Jul 7, 2019) brings `bigint` support to `stat` output on Windows. This is backwards-incompatible for some cases. Be careful. It you use it incorrectly, you'll see "TypeError: Cannot mix BigInt and other types, use explicit conversions". -- 3.0 brings huge performance improvements and stream backpressure support. -- Upgrading 2.x to 3.x: - - Signature changed from `readdirp(options)` to `readdirp(root, options)` - - Replaced callback API with promise API. - - Renamed `entryType` option to `type` - - Renamed `entryType: 'both'` to `'files_directories'` - - `EntryInfo` - - Renamed `stat` to `stats` - - Emitted only when `alwaysStat: true` - - `dirent` is emitted instead of `stats` by default with `alwaysStat: false` - - Renamed `name` to `basename` - - Removed `parentDir` and `fullParentDir` properties -- Supported node.js versions: - - 3.x: node 8+ - - 2.x: node 0.6+ - -## License - -Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller () - -MIT License, see [LICENSE](LICENSE) file. diff --git a/node_modules/readdirp/index.d.ts b/node_modules/readdirp/index.d.ts deleted file mode 100644 index cbbd76c..0000000 --- a/node_modules/readdirp/index.d.ts +++ /dev/null @@ -1,43 +0,0 @@ -// TypeScript Version: 3.2 - -/// - -import * as fs from 'fs'; -import { Readable } from 'stream'; - -declare namespace readdir { - interface EntryInfo { - path: string; - fullPath: string; - basename: string; - stats?: fs.Stats; - dirent?: fs.Dirent; - } - - interface ReaddirpOptions { - root?: string; - fileFilter?: string | string[] | ((entry: EntryInfo) => boolean); - directoryFilter?: string | string[] | ((entry: EntryInfo) => boolean); - type?: 'files' | 'directories' | 'files_directories' | 'all'; - lstat?: boolean; - depth?: number; - alwaysStat?: boolean; - } - - interface ReaddirpStream extends Readable, AsyncIterable { - read(): EntryInfo; - [Symbol.asyncIterator](): AsyncIterableIterator; - } - - function promise( - root: string, - options?: ReaddirpOptions - ): Promise; -} - -declare function readdir( - root: string, - options?: readdir.ReaddirpOptions -): readdir.ReaddirpStream; - -export = readdir; diff --git a/node_modules/readdirp/index.js b/node_modules/readdirp/index.js deleted file mode 100644 index cf739b2..0000000 --- a/node_modules/readdirp/index.js +++ /dev/null @@ -1,287 +0,0 @@ -'use strict'; - -const fs = require('fs'); -const { Readable } = require('stream'); -const sysPath = require('path'); -const { promisify } = require('util'); -const picomatch = require('picomatch'); - -const readdir = promisify(fs.readdir); -const stat = promisify(fs.stat); -const lstat = promisify(fs.lstat); -const realpath = promisify(fs.realpath); - -/** - * @typedef {Object} EntryInfo - * @property {String} path - * @property {String} fullPath - * @property {fs.Stats=} stats - * @property {fs.Dirent=} dirent - * @property {String} basename - */ - -const BANG = '!'; -const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR'; -const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]); -const FILE_TYPE = 'files'; -const DIR_TYPE = 'directories'; -const FILE_DIR_TYPE = 'files_directories'; -const EVERYTHING_TYPE = 'all'; -const ALL_TYPES = [FILE_TYPE, DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE]; - -const isNormalFlowError = error => NORMAL_FLOW_ERRORS.has(error.code); -const [maj, min] = process.versions.node.split('.').slice(0, 2).map(n => Number.parseInt(n, 10)); -const wantBigintFsStats = process.platform === 'win32' && (maj > 10 || (maj === 10 && min >= 5)); - -const 
normalizeFilter = filter => { - if (filter === undefined) return; - if (typeof filter === 'function') return filter; - - if (typeof filter === 'string') { - const glob = picomatch(filter.trim()); - return entry => glob(entry.basename); - } - - if (Array.isArray(filter)) { - const positive = []; - const negative = []; - for (const item of filter) { - const trimmed = item.trim(); - if (trimmed.charAt(0) === BANG) { - negative.push(picomatch(trimmed.slice(1))); - } else { - positive.push(picomatch(trimmed)); - } - } - - if (negative.length > 0) { - if (positive.length > 0) { - return entry => - positive.some(f => f(entry.basename)) && !negative.some(f => f(entry.basename)); - } - return entry => !negative.some(f => f(entry.basename)); - } - return entry => positive.some(f => f(entry.basename)); - } -}; - -class ReaddirpStream extends Readable { - static get defaultOptions() { - return { - root: '.', - /* eslint-disable no-unused-vars */ - fileFilter: (path) => true, - directoryFilter: (path) => true, - /* eslint-enable no-unused-vars */ - type: FILE_TYPE, - lstat: false, - depth: 2147483648, - alwaysStat: false - }; - } - - constructor(options = {}) { - super({ - objectMode: true, - autoDestroy: true, - highWaterMark: options.highWaterMark || 4096 - }); - const opts = { ...ReaddirpStream.defaultOptions, ...options }; - const { root, type } = opts; - - this._fileFilter = normalizeFilter(opts.fileFilter); - this._directoryFilter = normalizeFilter(opts.directoryFilter); - - const statMethod = opts.lstat ? lstat : stat; - // Use bigint stats if it's windows and stat() supports options (node 10+). - if (wantBigintFsStats) { - this._stat = path => statMethod(path, { bigint: true }); - } else { - this._stat = statMethod; - } - - this._maxDepth = opts.depth; - this._wantsDir = [DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type); - this._wantsFile = [FILE_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type); - this._wantsEverything = type === EVERYTHING_TYPE; - this._root = sysPath.resolve(root); - this._isDirent = ('Dirent' in fs) && !opts.alwaysStat; - this._statsProp = this._isDirent ? 'dirent' : 'stats'; - this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent }; - - // Launch stream with one parent, the root dir. 
- this.parents = [this._exploreDir(root, 1)]; - this.reading = false; - this.parent = undefined; - } - - async _read(batch) { - if (this.reading) return; - this.reading = true; - - try { - while (!this.destroyed && batch > 0) { - const { path, depth, files = [] } = this.parent || {}; - - if (files.length > 0) { - const slice = files.splice(0, batch).map(dirent => this._formatEntry(dirent, path)); - for (const entry of await Promise.all(slice)) { - if (this.destroyed) return; - - const entryType = await this._getEntryType(entry); - if (entryType === 'directory' && this._directoryFilter(entry)) { - if (depth <= this._maxDepth) { - this.parents.push(this._exploreDir(entry.fullPath, depth + 1)); - } - - if (this._wantsDir) { - this.push(entry); - batch--; - } - } else if ((entryType === 'file' || this._includeAsFile(entry)) && this._fileFilter(entry)) { - if (this._wantsFile) { - this.push(entry); - batch--; - } - } - } - } else { - const parent = this.parents.pop(); - if (!parent) { - this.push(null); - break; - } - this.parent = await parent; - if (this.destroyed) return; - } - } - } catch (error) { - this.destroy(error); - } finally { - this.reading = false; - } - } - - async _exploreDir(path, depth) { - let files; - try { - files = await readdir(path, this._rdOptions); - } catch (error) { - this._onError(error); - } - return { files, depth, path }; - } - - async _formatEntry(dirent, path) { - let entry; - try { - const basename = this._isDirent ? dirent.name : dirent; - const fullPath = sysPath.resolve(sysPath.join(path, basename)); - entry = { path: sysPath.relative(this._root, fullPath), fullPath, basename }; - entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath); - } catch (err) { - this._onError(err); - } - return entry; - } - - _onError(err) { - if (isNormalFlowError(err) && !this.destroyed) { - this.emit('warn', err); - } else { - this.destroy(err); - } - } - - async _getEntryType(entry) { - // entry may be undefined, because a warning or an error were emitted - // and the statsProp is undefined - const stats = entry && entry[this._statsProp]; - if (!stats) { - return; - } - if (stats.isFile()) { - return 'file'; - } - if (stats.isDirectory()) { - return 'directory'; - } - if (stats && stats.isSymbolicLink()) { - const full = entry.fullPath; - try { - const entryRealPath = await realpath(full); - const entryRealPathStats = await lstat(entryRealPath); - if (entryRealPathStats.isFile()) { - return 'file'; - } - if (entryRealPathStats.isDirectory()) { - const len = entryRealPath.length; - if (full.startsWith(entryRealPath) && full.substr(len, 1) === sysPath.sep) { - const recursiveError = new Error( - `Circular symlink detected: "${full}" points to "${entryRealPath}"` - ); - recursiveError.code = RECURSIVE_ERROR_CODE; - return this._onError(recursiveError); - } - return 'directory'; - } - } catch (error) { - this._onError(error); - } - } - } - - _includeAsFile(entry) { - const stats = entry && entry[this._statsProp]; - - return stats && this._wantsEverything && !stats.isDirectory(); - } -} - -/** - * @typedef {Object} ReaddirpArguments - * @property {Function=} fileFilter - * @property {Function=} directoryFilter - * @property {String=} type - * @property {Number=} depth - * @property {String=} root - * @property {Boolean=} lstat - * @property {Boolean=} bigint - */ - -/** - * Main function which ends up calling readdirRec and reads all files and directories in given root recursively. 
- * @param {String} root Root directory - * @param {ReaddirpArguments=} options Options to specify root (start directory), filters and recursion depth - */ -const readdirp = (root, options = {}) => { - let type = options.entryType || options.type; - if (type === 'both') type = FILE_DIR_TYPE; // backwards-compatibility - if (type) options.type = type; - if (!root) { - throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)'); - } else if (typeof root !== 'string') { - throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)'); - } else if (type && !ALL_TYPES.includes(type)) { - throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`); - } - - options.root = root; - return new ReaddirpStream(options); -}; - -const readdirpPromise = (root, options = {}) => { - return new Promise((resolve, reject) => { - const files = []; - readdirp(root, options) - .on('data', entry => files.push(entry)) - .on('end', () => resolve(files)) - .on('error', error => reject(error)); - }); -}; - -readdirp.promise = readdirpPromise; -readdirp.ReaddirpStream = ReaddirpStream; -readdirp.default = readdirp; - -module.exports = readdirp; diff --git a/node_modules/readdirp/package.json b/node_modules/readdirp/package.json deleted file mode 100644 index dba5388..0000000 --- a/node_modules/readdirp/package.json +++ /dev/null @@ -1,122 +0,0 @@ -{ - "name": "readdirp", - "description": "Recursive version of fs.readdir with streaming API.", - "version": "3.6.0", - "homepage": "https://github.com/paulmillr/readdirp", - "repository": { - "type": "git", - "url": "git://github.com/paulmillr/readdirp.git" - }, - "license": "MIT", - "bugs": { - "url": "https://github.com/paulmillr/readdirp/issues" - }, - "author": "Thorsten Lorenz (thlorenz.com)", - "contributors": [ - "Thorsten Lorenz (thlorenz.com)", - "Paul Miller (https://paulmillr.com)" - ], - "main": "index.js", - "engines": { - "node": ">=8.10.0" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "recursive", - "fs", - "stream", - "streams", - "readdir", - "filesystem", - "find", - "filter" - ], - "scripts": { - "dtslint": "dtslint", - "nyc": "nyc", - "mocha": "mocha --exit", - "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .", - "test": "npm run lint && nyc npm run mocha" - }, - "dependencies": { - "picomatch": "^2.2.1" - }, - "devDependencies": { - "@types/node": "^14", - "chai": "^4.2", - "chai-subset": "^1.6", - "dtslint": "^3.3.0", - "eslint": "^7.0.0", - "mocha": "^7.1.1", - "nyc": "^15.0.0", - "rimraf": "^3.0.0", - "typescript": "^4.0.3" - }, - "nyc": { - "reporter": [ - "html", - "text" - ] - }, - "eslintConfig": { - "root": true, - "extends": "eslint:recommended", - "parserOptions": { - "ecmaVersion": 9, - "sourceType": "script" - }, - "env": { - "node": true, - "es6": true - }, - "rules": { - "array-callback-return": "error", - "no-empty": [ - "error", - { - "allowEmptyCatch": true - } - ], - "no-else-return": [ - "error", - { - "allowElseIf": false - } - ], - "no-lonely-if": "error", - "no-var": "error", - "object-shorthand": "error", - "prefer-arrow-callback": [ - "error", - { - "allowNamedFunctions": true - } - ], - "prefer-const": [ - "error", - { - "ignoreReadBeforeAssign": true - } - ], - "prefer-destructuring": [ - "error", - { - "object": true, - "array": false - } - ], - "prefer-spread": "error", - "prefer-template": "error", - "radix": "error", - "semi": "error", - "strict": "error", - "quotes": [ - "error", - 
"single" - ] - } - } -} diff --git a/node_modules/require-directory/.jshintrc b/node_modules/require-directory/.jshintrc deleted file mode 100644 index e14e4dc..0000000 --- a/node_modules/require-directory/.jshintrc +++ /dev/null @@ -1,67 +0,0 @@ -{ - "maxerr" : 50, - "bitwise" : true, - "camelcase" : true, - "curly" : true, - "eqeqeq" : true, - "forin" : true, - "immed" : true, - "indent" : 2, - "latedef" : true, - "newcap" : true, - "noarg" : true, - "noempty" : true, - "nonew" : true, - "plusplus" : true, - "quotmark" : true, - "undef" : true, - "unused" : true, - "strict" : true, - "trailing" : true, - "maxparams" : false, - "maxdepth" : false, - "maxstatements" : false, - "maxcomplexity" : false, - "maxlen" : false, - "asi" : false, - "boss" : false, - "debug" : false, - "eqnull" : true, - "es5" : false, - "esnext" : false, - "moz" : false, - "evil" : false, - "expr" : true, - "funcscope" : true, - "globalstrict" : true, - "iterator" : true, - "lastsemic" : false, - "laxbreak" : false, - "laxcomma" : false, - "loopfunc" : false, - "multistr" : false, - "proto" : false, - "scripturl" : false, - "smarttabs" : false, - "shadow" : false, - "sub" : false, - "supernew" : false, - "validthis" : false, - "browser" : true, - "couch" : false, - "devel" : true, - "dojo" : false, - "jquery" : false, - "mootools" : false, - "node" : true, - "nonstandard" : false, - "prototypejs" : false, - "rhino" : false, - "worker" : false, - "wsh" : false, - "yui" : false, - "nomen" : true, - "onevar" : true, - "passfail" : false, - "white" : true -} diff --git a/node_modules/require-directory/.npmignore b/node_modules/require-directory/.npmignore deleted file mode 100644 index 47cf365..0000000 --- a/node_modules/require-directory/.npmignore +++ /dev/null @@ -1 +0,0 @@ -test/** diff --git a/node_modules/require-directory/.travis.yml b/node_modules/require-directory/.travis.yml deleted file mode 100644 index 20fd86b..0000000 --- a/node_modules/require-directory/.travis.yml +++ /dev/null @@ -1,3 +0,0 @@ -language: node_js -node_js: - - 0.10 diff --git a/node_modules/require-directory/LICENSE b/node_modules/require-directory/LICENSE deleted file mode 100644 index a70f253..0000000 --- a/node_modules/require-directory/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2011 Troy Goode - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/require-directory/README.markdown b/node_modules/require-directory/README.markdown deleted file mode 100644 index 926a063..0000000 --- a/node_modules/require-directory/README.markdown +++ /dev/null @@ -1,184 +0,0 @@ -# require-directory - -Recursively iterates over specified directory, `require()`'ing each file, and returning a nested hash structure containing those modules. - -**[Follow me (@troygoode) on Twitter!](https://twitter.com/intent/user?screen_name=troygoode)** - -[![NPM](https://nodei.co/npm/require-directory.png?downloads=true&stars=true)](https://nodei.co/npm/require-directory/) - -[![build status](https://secure.travis-ci.org/troygoode/node-require-directory.png)](http://travis-ci.org/troygoode/node-require-directory) - -## How To Use - -### Installation (via [npm](https://npmjs.org/package/require-directory)) - -```bash -$ npm install require-directory -``` - -### Usage - -A common pattern in node.js is to include an index file which creates a hash of the files in its current directory. Given a directory structure like so: - -* app.js -* routes/ - * index.js - * home.js - * auth/ - * login.js - * logout.js - * register.js - -`routes/index.js` uses `require-directory` to build the hash (rather than doing so manually) like so: - -```javascript -var requireDirectory = require('require-directory'); -module.exports = requireDirectory(module); -``` - -`app.js` references `routes/index.js` like any other module, but it now has a hash/tree of the exports from the `./routes/` directory: - -```javascript -var routes = require('./routes'); - -// snip - -app.get('/', routes.home); -app.get('/register', routes.auth.register); -app.get('/login', routes.auth.login); -app.get('/logout', routes.auth.logout); -``` - -The `routes` variable above is the equivalent of this: - -```javascript -var routes = { - home: require('routes/home.js'), - auth: { - login: require('routes/auth/login.js'), - logout: require('routes/auth/logout.js'), - register: require('routes/auth/register.js') - } -}; -``` - -*Note that `routes.index` will be `undefined` as you would hope.* - -### Specifying Another Directory - -You can specify which directory you want to build a tree of (if it isn't the current directory for whatever reason) by passing it as the second parameter. Not specifying the path (`requireDirectory(module)`) is the equivelant of `requireDirectory(module, __dirname)`: - -```javascript -var requireDirectory = require('require-directory'); -module.exports = requireDirectory(module, './some/subdirectory'); -``` - -For example, in the [example in the Usage section](#usage) we could have avoided creating `routes/index.js` and instead changed the first lines of `app.js` to: - -```javascript -var requireDirectory = require('require-directory'); -var routes = requireDirectory(module, './routes'); -``` - -## Options - -You can pass an options hash to `require-directory` as the 2nd parameter (or 3rd if you're passing the path to another directory as the 2nd parameter already). Here are the available options: - -### Whitelisting - -Whitelisting (either via RegExp or function) allows you to specify that only certain files be loaded. 
- -```javascript -var requireDirectory = require('require-directory'), - whitelist = /onlyinclude.js$/, - hash = requireDirectory(module, {include: whitelist}); -``` - -```javascript -var requireDirectory = require('require-directory'), - check = function(path){ - if(/onlyinclude.js$/.test(path)){ - return true; // don't include - }else{ - return false; // go ahead and include - } - }, - hash = requireDirectory(module, {include: check}); -``` - -### Blacklisting - -Blacklisting (either via RegExp or function) allows you to specify that all but certain files should be loaded. - -```javascript -var requireDirectory = require('require-directory'), - blacklist = /dontinclude\.js$/, - hash = requireDirectory(module, {exclude: blacklist}); -``` - -```javascript -var requireDirectory = require('require-directory'), - check = function(path){ - if(/dontinclude\.js$/.test(path)){ - return false; // don't include - }else{ - return true; // go ahead and include - } - }, - hash = requireDirectory(module, {exclude: check}); -``` - -### Visiting Objects As They're Loaded - -`require-directory` takes a function as the `visit` option that will be called for each module that is added to module.exports. - -```javascript -var requireDirectory = require('require-directory'), - visitor = function(obj) { - console.log(obj); // will be called for every module that is loaded - }, - hash = requireDirectory(module, {visit: visitor}); -``` - -The visitor can also transform the objects by returning a value: - -```javascript -var requireDirectory = require('require-directory'), - visitor = function(obj) { - return obj(new Date()); - }, - hash = requireDirectory(module, {visit: visitor}); -``` - -### Renaming Keys - -```javascript -var requireDirectory = require('require-directory'), - renamer = function(name) { - return name.toUpperCase(); - }, - hash = requireDirectory(module, {rename: renamer}); -``` - -### No Recursion - -```javascript -var requireDirectory = require('require-directory'), - hash = requireDirectory(module, {recurse: false}); -``` - -## Run Unit Tests - -```bash -$ npm run lint -$ npm test -``` - -## License - -[MIT License](http://www.opensource.org/licenses/mit-license.php) - -## Author - -[Troy Goode](https://github.com/TroyGoode) ([troygoode@gmail.com](mailto:troygoode@gmail.com)) - diff --git a/node_modules/require-directory/index.js b/node_modules/require-directory/index.js deleted file mode 100644 index cd37da7..0000000 --- a/node_modules/require-directory/index.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict'; - -var fs = require('fs'), - join = require('path').join, - resolve = require('path').resolve, - dirname = require('path').dirname, - defaultOptions = { - extensions: ['js', 'json', 'coffee'], - recurse: true, - rename: function (name) { - return name; - }, - visit: function (obj) { - return obj; - } - }; - -function checkFileInclusion(path, filename, options) { - return ( - // verify file has valid extension - (new RegExp('\\.(' + options.extensions.join('|') + ')$', 'i').test(filename)) && - - // if options.include is a RegExp, evaluate it and make sure the path passes - !(options.include && options.include instanceof RegExp && !options.include.test(path)) && - - // if options.include is a function, evaluate it and make sure the path passes - !(options.include && typeof options.include === 'function' && !options.include(path, filename)) && - - // if options.exclude is a RegExp, evaluate it and make sure the path doesn't pass - !(options.exclude && options.exclude instanceof RegExp && 
options.exclude.test(path)) && - - // if options.exclude is a function, evaluate it and make sure the path doesn't pass - !(options.exclude && typeof options.exclude === 'function' && options.exclude(path, filename)) - ); -} - -function requireDirectory(m, path, options) { - var retval = {}; - - // path is optional - if (path && !options && typeof path !== 'string') { - options = path; - path = null; - } - - // default options - options = options || {}; - for (var prop in defaultOptions) { - if (typeof options[prop] === 'undefined') { - options[prop] = defaultOptions[prop]; - } - } - - // if no path was passed in, assume the equivelant of __dirname from caller - // otherwise, resolve path relative to the equivalent of __dirname - path = !path ? dirname(m.filename) : resolve(dirname(m.filename), path); - - // get the path of each file in specified directory, append to current tree node, recurse - fs.readdirSync(path).forEach(function (filename) { - var joined = join(path, filename), - files, - key, - obj; - - if (fs.statSync(joined).isDirectory() && options.recurse) { - // this node is a directory; recurse - files = requireDirectory(m, joined, options); - // exclude empty directories - if (Object.keys(files).length) { - retval[options.rename(filename, joined, filename)] = files; - } - } else { - if (joined !== m.filename && checkFileInclusion(joined, filename, options)) { - // hash node key shouldn't include file extension - key = filename.substring(0, filename.lastIndexOf('.')); - obj = m.require(joined); - retval[options.rename(key, joined, filename)] = options.visit(obj, joined, filename) || obj; - } - } - }); - - return retval; -} - -module.exports = requireDirectory; -module.exports.defaults = defaultOptions; diff --git a/node_modules/require-directory/package.json b/node_modules/require-directory/package.json deleted file mode 100644 index 25ece4b..0000000 --- a/node_modules/require-directory/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "author": "Troy Goode (http://github.com/troygoode/)", - "name": "require-directory", - "version": "2.1.1", - "description": "Recursively iterates over specified directory, require()'ing each file, and returning a nested hash structure containing those modules.", - "keywords": [ - "require", - "directory", - "library", - "recursive" - ], - "homepage": "https://github.com/troygoode/node-require-directory/", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/troygoode/node-require-directory.git" - }, - "contributors": [ - { - "name": "Troy Goode", - "email": "troygoode@gmail.com", - "web": "http://github.com/troygoode/" - } - ], - "license": "MIT", - "bugs": { - "url": "http://github.com/troygoode/node-require-directory/issues/" - }, - "engines": { - "node": ">=0.10.0" - }, - "devDependencies": { - "jshint": "^2.6.0", - "mocha": "^2.1.0" - }, - "scripts": { - "test": "mocha", - "lint": "jshint index.js test/test.js" - } -} diff --git a/node_modules/reusify/.coveralls.yml b/node_modules/reusify/.coveralls.yml deleted file mode 100644 index 359f683..0000000 --- a/node_modules/reusify/.coveralls.yml +++ /dev/null @@ -1 +0,0 @@ -repo_token: yIxhFqtaaz5iGVYfie9mODehFYogm8S8L diff --git a/node_modules/reusify/.travis.yml b/node_modules/reusify/.travis.yml deleted file mode 100644 index 1970476..0000000 --- a/node_modules/reusify/.travis.yml +++ /dev/null @@ -1,28 +0,0 @@ -language: node_js -sudo: false - -node_js: - - 9 - - 8 - - 7 - - 6 - - 5 - - 4 - - 4.0 - - iojs-v3 - - iojs-v2 - - iojs-v1 - - 0.12 - - 0.10 - -cache: - 
directories: - - node_modules - -after_script: -- npm run coverage - -notifications: - email: - on_success: never - on_failure: always diff --git a/node_modules/reusify/LICENSE b/node_modules/reusify/LICENSE deleted file mode 100644 index fbf3a01..0000000 --- a/node_modules/reusify/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Matteo Collina - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/node_modules/reusify/README.md b/node_modules/reusify/README.md deleted file mode 100644 index badcb7c..0000000 --- a/node_modules/reusify/README.md +++ /dev/null @@ -1,145 +0,0 @@ -# reusify - -[![npm version][npm-badge]][npm-url] -[![Build Status][travis-badge]][travis-url] -[![Coverage Status][coveralls-badge]][coveralls-url] - -Reuse your objects and functions for maximum speed. This technique will -make any function run ~10% faster. You call your functions a -lot, and it adds up quickly in hot code paths. - -``` -$ node benchmarks/createNoCodeFunction.js -Total time 53133 -Total iterations 100000000 -Iteration/s 1882069.5236482036 - -$ node benchmarks/reuseNoCodeFunction.js -Total time 50617 -Total iterations 100000000 -Iteration/s 1975620.838848608 -``` - -The above benchmark uses fibonacci to simulate a real high-cpu load. -The actual numbers might differ for your use case, but the difference -should not. - -The benchmark was taken using Node v6.10.0. - -This library was extracted from -[fastparallel](http://npm.im/fastparallel). - -## Example - -```js -var reusify = require('reusify') -var fib = require('reusify/benchmarks/fib') -var instance = reusify(MyObject) - -// get an object from the cache, -// or creates a new one when cache is empty -var obj = instance.get() - -// set the state -obj.num = 100 -obj.func() - -// reset the state. 
-// if the state contains any external object -// do not use delete operator (it is slow) -// prefer set them to null -obj.num = 0 - -// store an object in the cache -instance.release(obj) - -function MyObject () { - // you need to define this property - // so V8 can compile MyObject into an - // hidden class - this.next = null - this.num = 0 - - var that = this - - // this function is never reallocated, - // so it can be optimized by V8 - this.func = function () { - if (null) { - // do nothing - } else { - // calculates fibonacci - fib(that.num) - } - } -} -``` - -The above example was intended for synchronous code, let's see async: -```js -var reusify = require('reusify') -var instance = reusify(MyObject) - -for (var i = 0; i < 100; i++) { - getData(i, console.log) -} - -function getData (value, cb) { - var obj = instance.get() - - obj.value = value - obj.cb = cb - obj.run() -} - -function MyObject () { - this.next = null - this.value = null - - var that = this - - this.run = function () { - asyncOperation(that.value, that.handle) - } - - this.handle = function (err, result) { - that.cb(err, result) - that.value = null - that.cb = null - instance.release(that) - } -} -``` - -Also note how in the above examples, the code, that consumes an istance of `MyObject`, -reset the state to initial condition, just before storing it in the cache. -That's needed so that every subsequent request for an instance from the cache, -could get a clean instance. - -## Why - -It is faster because V8 doesn't have to collect all the functions you -create. On a short-lived benchmark, it is as fast as creating the -nested function, but on a longer time frame it creates less -pressure on the garbage collector. - -## Other examples -If you want to see some complex example, checkout [middie](https://github.com/fastify/middie) and [steed](https://github.com/mcollina/steed). - -## Acknowledgements - -Thanks to [Trevor Norris](https://github.com/trevnorris) for -getting me down the rabbit hole of performance, and thanks to [Mathias -Buss](http://github.com/mafintosh) for suggesting me to share this -trick. 
- -## License - -MIT - -[npm-badge]: https://badge.fury.io/js/reusify.svg -[npm-url]: https://badge.fury.io/js/reusify -[travis-badge]: https://api.travis-ci.org/mcollina/reusify.svg -[travis-url]: https://travis-ci.org/mcollina/reusify -[coveralls-badge]: https://coveralls.io/repos/mcollina/reusify/badge.svg?branch=master&service=github -[coveralls-url]: https://coveralls.io/github/mcollina/reusify?branch=master diff --git a/node_modules/reusify/benchmarks/createNoCodeFunction.js b/node_modules/reusify/benchmarks/createNoCodeFunction.js deleted file mode 100644 index ce1aac7..0000000 --- a/node_modules/reusify/benchmarks/createNoCodeFunction.js +++ /dev/null @@ -1,30 +0,0 @@ -'use strict' - -var fib = require('./fib') -var max = 100000000 -var start = Date.now() - -// create a funcion with the typical error -// pattern, that delegates the heavy load -// to something else -function createNoCodeFunction () { - /* eslint no-constant-condition: "off" */ - var num = 100 - - ;(function () { - if (null) { - // do nothing - } else { - fib(num) - } - })() -} - -for (var i = 0; i < max; i++) { - createNoCodeFunction() -} - -var time = Date.now() - start -console.log('Total time', time) -console.log('Total iterations', max) -console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/benchmarks/fib.js b/node_modules/reusify/benchmarks/fib.js deleted file mode 100644 index e22cc48..0000000 --- a/node_modules/reusify/benchmarks/fib.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -function fib (num) { - var fib = [] - - fib[0] = 0 - fib[1] = 1 - for (var i = 2; i <= num; i++) { - fib[i] = fib[i - 2] + fib[i - 1] - } -} - -module.exports = fib diff --git a/node_modules/reusify/benchmarks/reuseNoCodeFunction.js b/node_modules/reusify/benchmarks/reuseNoCodeFunction.js deleted file mode 100644 index 3358d6e..0000000 --- a/node_modules/reusify/benchmarks/reuseNoCodeFunction.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict' - -var reusify = require('../') -var fib = require('./fib') -var instance = reusify(MyObject) -var max = 100000000 -var start = Date.now() - -function reuseNoCodeFunction () { - var obj = instance.get() - obj.num = 100 - obj.func() - obj.num = 0 - instance.release(obj) -} - -function MyObject () { - this.next = null - var that = this - this.num = 0 - this.func = function () { - /* eslint no-constant-condition: "off" */ - if (null) { - // do nothing - } else { - fib(that.num) - } - } -} - -for (var i = 0; i < max; i++) { - reuseNoCodeFunction() -} - -var time = Date.now() - start -console.log('Total time', time) -console.log('Total iterations', max) -console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/package.json b/node_modules/reusify/package.json deleted file mode 100644 index ee66aee..0000000 --- a/node_modules/reusify/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "reusify", - "version": "1.0.4", - "description": "Reuse objects and functions with style", - "main": "reusify.js", - "scripts": { - "lint": "standard", - "test": "tape test.js | faucet", - "istanbul": "istanbul cover tape test.js", - "coverage": "npm run istanbul; cat coverage/lcov.info | coveralls" - }, - "pre-commit": [ - "lint", - "test" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/mcollina/reusify.git" - }, - "keywords": [ - "reuse", - "object", - "performance", - "function", - "fast" - ], - "author": "Matteo Collina ", - "license": "MIT", - "bugs": { - "url": "https://github.com/mcollina/reusify/issues" - }, - "homepage": 
"https://github.com/mcollina/reusify#readme", - "engines": { - "node": ">=0.10.0", - "iojs": ">=1.0.0" - }, - "devDependencies": { - "coveralls": "^2.13.3", - "faucet": "0.0.1", - "istanbul": "^0.4.5", - "pre-commit": "^1.2.2", - "standard": "^10.0.3", - "tape": "^4.8.0" - } -} diff --git a/node_modules/reusify/reusify.js b/node_modules/reusify/reusify.js deleted file mode 100644 index e6f36f3..0000000 --- a/node_modules/reusify/reusify.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -function reusify (Constructor) { - var head = new Constructor() - var tail = head - - function get () { - var current = head - - if (current.next) { - head = current.next - } else { - head = new Constructor() - tail = head - } - - current.next = null - - return current - } - - function release (obj) { - tail.next = obj - tail = obj - } - - return { - get: get, - release: release - } -} - -module.exports = reusify diff --git a/node_modules/reusify/test.js b/node_modules/reusify/test.js deleted file mode 100644 index 929cfd7..0000000 --- a/node_modules/reusify/test.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -var test = require('tape') -var reusify = require('./') - -test('reuse objects', function (t) { - t.plan(6) - - function MyObject () { - t.pass('constructor called') - this.next = null - } - - var instance = reusify(MyObject) - var obj = instance.get() - - t.notEqual(obj, instance.get(), 'two instance created') - t.notOk(obj.next, 'next must be null') - - instance.release(obj) - - // the internals keeps a hot copy ready for reuse - // putting this one back in the queue - instance.release(instance.get()) - - // comparing the old one with the one we got - // never do this in real code, after release you - // should never reuse that instance - t.equal(obj, instance.get(), 'instance must be reused') -}) - -test('reuse more than 2 objects', function (t) { - function MyObject () { - t.pass('constructor called') - this.next = null - } - - var instance = reusify(MyObject) - var obj = instance.get() - var obj2 = instance.get() - var obj3 = instance.get() - - t.notOk(obj.next, 'next must be null') - t.notOk(obj2.next, 'next must be null') - t.notOk(obj3.next, 'next must be null') - - t.notEqual(obj, obj2) - t.notEqual(obj, obj3) - t.notEqual(obj3, obj2) - - instance.release(obj) - instance.release(obj2) - instance.release(obj3) - - // skip one - instance.get() - - var obj4 = instance.get() - var obj5 = instance.get() - var obj6 = instance.get() - - t.equal(obj4, obj) - t.equal(obj5, obj2) - t.equal(obj6, obj3) - t.end() -}) diff --git a/node_modules/run-parallel/LICENSE b/node_modules/run-parallel/LICENSE deleted file mode 100644 index c7e6852..0000000 --- a/node_modules/run-parallel/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/run-parallel/README.md b/node_modules/run-parallel/README.md deleted file mode 100644 index edc3da4..0000000 --- a/node_modules/run-parallel/README.md +++ /dev/null @@ -1,85 +0,0 @@ -# run-parallel [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/run-parallel/master.svg -[travis-url]: https://travis-ci.org/feross/run-parallel -[npm-image]: https://img.shields.io/npm/v/run-parallel.svg -[npm-url]: https://npmjs.org/package/run-parallel -[downloads-image]: https://img.shields.io/npm/dm/run-parallel.svg -[downloads-url]: https://npmjs.org/package/run-parallel -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -### Run an array of functions in parallel - -![parallel](https://raw.githubusercontent.com/feross/run-parallel/master/img.png) [![Sauce Test Status](https://saucelabs.com/browser-matrix/run-parallel.svg)](https://saucelabs.com/u/run-parallel) - -### install - -``` -npm install run-parallel -``` - -### usage - -#### parallel(tasks, [callback]) - -Run the `tasks` array of functions in parallel, without waiting until the previous -function has completed. If any of the functions pass an error to its callback, the main -`callback` is immediately called with the value of the error. Once the `tasks` have -completed, the results are passed to the final `callback` as an array. - -It is also possible to use an object instead of an array. Each property will be run as a -function and the results will be passed to the final `callback` as an object instead of -an array. This can be a more readable way of handling the results. - -##### arguments - -- `tasks` - An array or object containing functions to run. Each function is passed a -`callback(err, result)` which it must call on completion with an error `err` (which can -be `null`) and an optional `result` value. -- `callback(err, results)` - An optional callback to run once all the functions have -completed. This function gets a results array (or object) containing all the result -arguments passed to the task callbacks. - -##### example - -```js -var parallel = require('run-parallel') - -parallel([ - function (callback) { - setTimeout(function () { - callback(null, 'one') - }, 200) - }, - function (callback) { - setTimeout(function () { - callback(null, 'two') - }, 100) - } -], -// optional callback -function (err, results) { - // the results array will equal ['one','two'] even though - // the second function had a shorter timeout. -}) -``` - -This module is basically equavalent to -[`async.parallel`](https://github.com/caolan/async#paralleltasks-callback), but it's -handy to just have the one function you need instead of the kitchen sink. Modularity! -Especially handy if you're serving to the browser and need to reduce your javascript -bundle size. 
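As noted above, an object of task functions can be passed instead of an array, in which case the results are keyed by property name. A minimal sketch of that form (the task names `one` and `two` are illustrative, not part of the library):

```js
var parallel = require('run-parallel')

parallel({
  one: function (callback) {
    setTimeout(function () {
      callback(null, 1)
    }, 200)
  },
  two: function (callback) {
    setTimeout(function () {
      callback(null, 2)
    }, 100)
  }
},
// optional callback
function (err, results) {
  // results will equal { one: 1, two: 2 }
})
```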
- -Works great in the browser with [browserify](http://browserify.org/)! - -### see also - -- [run-auto](https://github.com/feross/run-auto) -- [run-parallel-limit](https://github.com/feross/run-parallel-limit) -- [run-series](https://github.com/feross/run-series) -- [run-waterfall](https://github.com/feross/run-waterfall) - -### license - -MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). diff --git a/node_modules/run-parallel/index.js b/node_modules/run-parallel/index.js deleted file mode 100644 index 6307141..0000000 --- a/node_modules/run-parallel/index.js +++ /dev/null @@ -1,51 +0,0 @@ -/*! run-parallel. MIT License. Feross Aboukhadijeh */ -module.exports = runParallel - -const queueMicrotask = require('queue-microtask') - -function runParallel (tasks, cb) { - let results, pending, keys - let isSync = true - - if (Array.isArray(tasks)) { - results = [] - pending = tasks.length - } else { - keys = Object.keys(tasks) - results = {} - pending = keys.length - } - - function done (err) { - function end () { - if (cb) cb(err, results) - cb = null - } - if (isSync) queueMicrotask(end) - else end() - } - - function each (i, err, result) { - results[i] = result - if (--pending === 0 || err) { - done(err) - } - } - - if (!pending) { - // empty - done(null) - } else if (keys) { - // object - keys.forEach(function (key) { - tasks[key](function (err, result) { each(key, err, result) }) - }) - } else { - // array - tasks.forEach(function (task, i) { - task(function (err, result) { each(i, err, result) }) - }) - } - - isSync = false -} diff --git a/node_modules/run-parallel/package.json b/node_modules/run-parallel/package.json deleted file mode 100644 index 1f14757..0000000 --- a/node_modules/run-parallel/package.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "run-parallel", - "description": "Run an array of functions in parallel", - "version": "1.2.0", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/run-parallel/issues" - }, - "dependencies": { - "queue-microtask": "^1.2.2" - }, - "devDependencies": { - "airtap": "^3.0.0", - "standard": "*", - "tape": "^5.0.1" - }, - "homepage": "https://github.com/feross/run-parallel", - "keywords": [ - "parallel", - "async", - "function", - "callback", - "asynchronous", - "run", - "array", - "run parallel" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/feross/run-parallel.git" - }, - "scripts": { - "test": "standard && npm run test-node && npm run test-browser", - "test-browser": "airtap -- test/*.js", - "test-browser-local": "airtap --local -- test/*.js", - "test-node": "tape test/*.js" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/node_modules/slash/index.d.ts b/node_modules/slash/index.d.ts deleted file mode 100644 index 692c7fd..0000000 --- a/node_modules/slash/index.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -/** -Convert Windows backslash paths to slash paths: `foo\\bar` ➔ `foo/bar`. - -[Forward-slash paths can be used in Windows](http://superuser.com/a/176395/6877) as long as they're not extended-length paths. - -@param path - A Windows backslash path. -@returns A path with forward slashes. 
- -@example -``` -import path from 'node:path'; -import slash from 'slash'; - -const string = path.join('foo', 'bar'); -// Unix => foo/bar -// Windows => foo\\bar - -slash(string); -// Unix => foo/bar -// Windows => foo/bar -``` -*/ -export default function slash(path: string): string; diff --git a/node_modules/slash/index.js b/node_modules/slash/index.js deleted file mode 100644 index 1b7ee1e..0000000 --- a/node_modules/slash/index.js +++ /dev/null @@ -1,9 +0,0 @@ -export default function slash(path) { - const isExtendedLengthPath = path.startsWith('\\\\?\\'); - - if (isExtendedLengthPath) { - return path; - } - - return path.replace(/\\/g, '/'); -} diff --git a/node_modules/slash/license b/node_modules/slash/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/slash/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/slash/package.json b/node_modules/slash/package.json deleted file mode 100644 index a0f507c..0000000 --- a/node_modules/slash/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "slash", - "version": "5.1.0", - "description": "Convert Windows backslash paths to slash paths", - "license": "MIT", - "repository": "sindresorhus/slash", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "type": "module", - "exports": "./index.js", - "types": "./index.d.ts", - "engines": { - "node": ">=14.16" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "path", - "seperator", - "slash", - "backslash", - "windows", - "convert" - ], - "devDependencies": { - "ava": "^5.2.0", - "tsd": "^0.28.1", - "xo": "^0.54.2" - } -} diff --git a/node_modules/slash/readme.md b/node_modules/slash/readme.md deleted file mode 100644 index 42f74f9..0000000 --- a/node_modules/slash/readme.md +++ /dev/null @@ -1,36 +0,0 @@ -# slash - -> Convert Windows backslash paths to slash paths: `foo\\bar` ➔ `foo/bar` - -[Forward-slash paths can be used in Windows](http://superuser.com/a/176395/6877) as long as they're not extended-length paths. - -This was created since the `path` methods in Node.js outputs `\\` paths on Windows. 
- -## Install - -```sh -npm install slash -``` - -## Usage - -```js -import path from 'node:path'; -import slash from 'slash'; - -const string = path.join('foo', 'bar'); -// Unix => foo/bar -// Windows => foo\\bar - -slash(string); -// Unix => foo/bar -// Windows => foo/bar -``` - -## API - -### slash(path) - -Type: `string` - -Accepts a Windows backslash path and returns a path with forward slashes. diff --git a/node_modules/source-map-js/LICENSE b/node_modules/source-map-js/LICENSE deleted file mode 100644 index ed1b7cf..0000000 --- a/node_modules/source-map-js/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ - -Copyright (c) 2009-2011, Mozilla Foundation and contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the names of the Mozilla Foundation nor the names of project - contributors may be used to endorse or promote products derived from this - software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/source-map-js/README.md b/node_modules/source-map-js/README.md deleted file mode 100644 index 614962d..0000000 --- a/node_modules/source-map-js/README.md +++ /dev/null @@ -1,765 +0,0 @@ -# Source Map JS - -[![NPM](https://nodei.co/npm/source-map-js.png?downloads=true&downloadRank=true)](https://www.npmjs.com/package/source-map-js) - -Difference between original [source-map](https://github.com/mozilla/source-map): - -> TL,DR: it's fork of original source-map@0.6, but with perfomance optimizations. - -This journey starts from [source-map@0.7.0](https://github.com/mozilla/source-map/blob/master/CHANGELOG.md#070). Some part of it was rewritten to Rust and WASM and API became async. - -It's still a major block for many libraries like PostCSS or Sass for example because they need to migrate the whole API to the async way. This is the reason why 0.6.1 has 2x more downloads than 0.7.3 while it's faster several times. - -![Downloads count](media/downloads.png) - -More important that WASM version has some optimizations in JS code too. This is why [community asked to create branch for 0.6 version](https://github.com/mozilla/source-map/issues/324) and port these optimizations but, sadly, the answer was «no». 
A bit later I discovered [the issue](https://github.com/mozilla/source-map/issues/370) created by [Ben Rothman (@benthemonkey)](https://github.com/benthemonkey) with no response at all. - -[Roman Dvornov (@lahmatiy)](https://github.com/lahmatiy) wrote a [serveral posts](https://t.me/gorshochekvarit/76) (russian, only, sorry) about source-map library in his own Telegram channel. He mentioned the article [«Maybe you don't need Rust and WASM to speed up your JS»](https://mrale.ph/blog/2018/02/03/maybe-you-dont-need-rust-to-speed-up-your-js.html) written by [Vyacheslav Egorov (@mraleph)](https://github.com/mraleph). This article contains optimizations and hacks that lead to almost the same performance compare to WASM implementation. - -I decided to fork the original source-map and port these optimizations from the article and several others PR from the original source-map. - ---------- - -This is a library to generate and consume the source map format -[described here][format]. - -[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit - -## Use with Node - - $ npm install source-map-js - - - --------------------------------------------------------------------------------- - - - - - -## Table of Contents - -- [Examples](#examples) - - [Consuming a source map](#consuming-a-source-map) - - [Generating a source map](#generating-a-source-map) - - [With SourceNode (high level API)](#with-sourcenode-high-level-api) - - [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api) -- [API](#api) - - [SourceMapConsumer](#sourcemapconsumer) - - [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap) - - [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans) - - [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition) - - [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition) - - [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition) - - [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources) - - [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing) - - [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order) - - [SourceMapGenerator](#sourcemapgenerator) - - [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap) - - [SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer) - - [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping) - - [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent) - - [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath) - - [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring) - - [SourceNode](#sourcenode) - - [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name) - - 
[SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath) - - [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk) - - [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk) - - [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent) - - [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn) - - [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn) - - [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep) - - [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement) - - [SourceNode.prototype.toString()](#sourcenodeprototypetostring) - - [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap) - - - -## Examples - -### Consuming a source map - -```js -var rawSourceMap = { - version: 3, - file: 'min.js', - names: ['bar', 'baz', 'n'], - sources: ['one.js', 'two.js'], - sourceRoot: 'http://example.com/www/js/', - mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' -}; - -var smc = new SourceMapConsumer(rawSourceMap); - -console.log(smc.sources); -// [ 'http://example.com/www/js/one.js', -// 'http://example.com/www/js/two.js' ] - -console.log(smc.originalPositionFor({ - line: 2, - column: 28 -})); -// { source: 'http://example.com/www/js/two.js', -// line: 2, -// column: 10, -// name: 'n' } - -console.log(smc.generatedPositionFor({ - source: 'http://example.com/www/js/two.js', - line: 2, - column: 10 -})); -// { line: 2, column: 28 } - -smc.eachMapping(function (m) { - // ... -}); -``` - -### Generating a source map - -In depth guide: -[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/) - -#### With SourceNode (high level API) - -```js -function compile(ast) { - switch (ast.type) { - case 'BinaryExpression': - return new SourceNode( - ast.location.line, - ast.location.column, - ast.location.source, - [compile(ast.left), " + ", compile(ast.right)] - ); - case 'Literal': - return new SourceNode( - ast.location.line, - ast.location.column, - ast.location.source, - String(ast.value) - ); - // ... - default: - throw new Error("Bad AST"); - } -} - -var ast = parse("40 + 2", "add.js"); -console.log(compile(ast).toStringWithSourceMap({ - file: 'add.js' -})); -// { code: '40 + 2', -// map: [object SourceMapGenerator] } -``` - -#### With SourceMapGenerator (low level API) - -```js -var map = new SourceMapGenerator({ - file: "source-mapped.js" -}); - -map.addMapping({ - generated: { - line: 10, - column: 35 - }, - source: "foo.js", - original: { - line: 33, - column: 2 - }, - name: "christopher" -}); - -console.log(map.toString()); -// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}' -``` - -## API - -Get a reference to the module: - -```js -// Node.js -var sourceMap = require('source-map'); - -// Browser builds -var sourceMap = window.sourceMap; - -// Inside Firefox -const sourceMap = require("devtools/toolkit/sourcemap/source-map.js"); -``` - -### SourceMapConsumer - -A SourceMapConsumer instance represents a parsed source map which we can query -for information about the original file positions by giving it a file position -in the generated source. 
- -#### new SourceMapConsumer(rawSourceMap) - -The only parameter is the raw source map (either as a string which can be -`JSON.parse`'d, or an object). According to the spec, source maps have the -following attributes: - -* `version`: Which version of the source map spec this map is following. - -* `sources`: An array of URLs to the original source files. - -* `names`: An array of identifiers which can be referenced by individual - mappings. - -* `sourceRoot`: Optional. The URL root from which all sources are relative. - -* `sourcesContent`: Optional. An array of contents of the original source files. - -* `mappings`: A string of base64 VLQs which contain the actual mappings. - -* `file`: Optional. The generated filename this source map is associated with. - -```js -var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData); -``` - -#### SourceMapConsumer.prototype.computeColumnSpans() - -Compute the last column for each generated mapping. The last column is -inclusive. - -```js -// Before: -consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) -// [ { line: 2, -// column: 1 }, -// { line: 2, -// column: 10 }, -// { line: 2, -// column: 20 } ] - -consumer.computeColumnSpans(); - -// After: -consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) -// [ { line: 2, -// column: 1, -// lastColumn: 9 }, -// { line: 2, -// column: 10, -// lastColumn: 19 }, -// { line: 2, -// column: 20, -// lastColumn: Infinity } ] - -``` - -#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition) - -Returns the original source, line, and column information for the generated -source's line and column positions provided. The only argument is an object with -the following properties: - -* `line`: The line number in the generated source. Line numbers in - this library are 1-based (note that the underlying source map - specification uses 0-based line numbers -- this library handles the - translation). - -* `column`: The column number in the generated source. Column numbers - in this library are 0-based. - -* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or - `SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest - element that is smaller than or greater than the one we are searching for, - respectively, if the exact element cannot be found. Defaults to - `SourceMapConsumer.GREATEST_LOWER_BOUND`. - -and an object is returned with the following properties: - -* `source`: The original source file, or null if this information is not - available. - -* `line`: The line number in the original source, or null if this information is - not available. The line number is 1-based. - -* `column`: The column number in the original source, or null if this - information is not available. The column number is 0-based. - -* `name`: The original identifier, or null if this information is not available. - -```js -consumer.originalPositionFor({ line: 2, column: 10 }) -// { source: 'foo.coffee', -// line: 2, -// column: 2, -// name: null } - -consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 }) -// { source: null, -// line: null, -// column: null, -// name: null } -``` - -#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition) - -Returns the generated line and column information for the original source, -line, and column positions provided. The only argument is an object with -the following properties: - -* `source`: The filename of the original source. - -* `line`: The line number in the original source. 
The line number is - 1-based. - -* `column`: The column number in the original source. The column - number is 0-based. - -and an object is returned with the following properties: - -* `line`: The line number in the generated source, or null. The line - number is 1-based. - -* `column`: The column number in the generated source, or null. The - column number is 0-based. - -```js -consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 }) -// { line: 1, -// column: 56 } -``` - -#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition) - -Returns all generated line and column information for the original source, line, -and column provided. If no column is provided, returns all mappings -corresponding to either the line we are searching for or the next closest line -that has any mappings. Otherwise, returns all mappings corresponding to the -given line and either the column we are searching for or the next closest column -that has any offsets. - -The only argument is an object with the following properties: - -* `source`: The filename of the original source. - -* `line`: The line number in the original source. The line number is - 1-based. - -* `column`: Optional. The column number in the original source. The - column number is 0-based. - -and an array of objects is returned, each with the following properties: - -* `line`: The line number in the generated source, or null. The line - number is 1-based. - -* `column`: The column number in the generated source, or null. The - column number is 0-based. - -```js -consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) -// [ { line: 2, -// column: 1 }, -// { line: 2, -// column: 10 }, -// { line: 2, -// column: 20 } ] -``` - -#### SourceMapConsumer.prototype.hasContentsOfAllSources() - -Returns true if we have the embedded source content for every source listed in -the source map, false otherwise. - -In other words, if this method returns `true`, then -`consumer.sourceContentFor(s)` will succeed for every source `s` in -`consumer.sources`. - -```js -// ... -if (consumer.hasContentsOfAllSources()) { - consumerReadyCallback(consumer); -} else { - fetchSources(consumer, consumerReadyCallback); -} -// ... -``` - -#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing]) - -Returns the original source content for the source provided. The only -argument is the URL of the original source file. - -If the source content for the given source is not found, then an error is -thrown. Optionally, pass `true` as the second parameter to have `null` returned -instead. - -```js -consumer.sources -// [ "my-cool-lib.clj" ] - -consumer.sourceContentFor("my-cool-lib.clj") -// "..." - -consumer.sourceContentFor("this is not in the source map"); -// Error: "this is not in the source map" is not in the source map - -consumer.sourceContentFor("this is not in the source map", true); -// null -``` - -#### SourceMapConsumer.prototype.eachMapping(callback, context, order) - -Iterate over each mapping between an original source/line/column and a -generated line/column in this source map. - -* `callback`: The function that is called with each mapping. Mappings have the - form `{ source, generatedLine, generatedColumn, originalLine, originalColumn, - name }`. - -* `context`: Optional. If specified, this object will be the value of `this` - every time that `callback` is called. - -* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or - `SourceMapConsumer.ORIGINAL_ORDER`.
Specifies whether you want to iterate over - the mappings sorted by the generated file's line/column order or the - original's source/line/column order, respectively. Defaults to - `SourceMapConsumer.GENERATED_ORDER`. - -```js -consumer.eachMapping(function (m) { console.log(m); }) -// ... -// { source: 'illmatic.js', -// generatedLine: 1, -// generatedColumn: 0, -// originalLine: 1, -// originalColumn: 0, -// name: null } -// { source: 'illmatic.js', -// generatedLine: 2, -// generatedColumn: 0, -// originalLine: 2, -// originalColumn: 0, -// name: null } -// ... -``` -### SourceMapGenerator - -An instance of the SourceMapGenerator represents a source map which is being -built incrementally. - -#### new SourceMapGenerator([startOfSourceMap]) - -You may pass an object with the following properties: - -* `file`: The filename of the generated source that this source map is - associated with. - -* `sourceRoot`: A root for all relative URLs in this source map. - -* `skipValidation`: Optional. When `true`, disables validation of mappings as - they are added. This can improve performance but should be used with - discretion, as a last resort. Even then, one should avoid using this flag when - running tests, if possible. - -* `ignoreInvalidMapping`: Optional. When `true`, invalid mappings are ignored - instead of throwing an error. - -```js -var generator = new sourceMap.SourceMapGenerator({ - file: "my-generated-javascript-file.js", - sourceRoot: "http://example.com/app/js/" -}); -``` - -#### SourceMapGenerator.fromSourceMap(sourceMapConsumer, sourceMapGeneratorOptions) - -Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance. - -* `sourceMapConsumer`: The `SourceMapConsumer` to build the generator from. - -* `sourceMapGeneratorOptions`: Options that will be passed to the `SourceMapGenerator` constructor used under the hood. - -```js -var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer, { - ignoreInvalidMapping: true, -}); -``` - -#### SourceMapGenerator.prototype.addMapping(mapping) - -Add a single mapping from original source line and column to the generated -source's line and column for this source map being created. The mapping object -should have the following properties: - -* `generated`: An object with the generated line and column positions. - -* `original`: An object with the original line and column positions. - -* `source`: The original source file (relative to the sourceRoot). - -* `name`: An optional original token name for this mapping. - -```js -generator.addMapping({ - source: "module-one.scm", - original: { line: 128, column: 0 }, - generated: { line: 3, column: 456 } -}) -``` - -#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent) - -Set the source content for an original source file. - -* `sourceFile`: The URL of the original source file. - -* `sourceContent`: The content of the source file. - -```js -generator.setSourceContent("module-one.scm", - fs.readFileSync("path/to/module-one.scm")) -``` - -#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]]) - -Applies a SourceMap for a source file to this SourceMap. -Each mapping to the supplied source file is rewritten using the -supplied SourceMap. Note: The resolution for the resulting mappings -is the minimum of this map and the supplied map. - -* `sourceMapConsumer`: The SourceMap to be applied. - -* `sourceFile`: Optional. The filename of the source file. - If omitted, sourceMapConsumer.file will be used, if it exists.
- Otherwise an error will be thrown. - -* `sourceMapPath`: Optional. The dirname of the path to the SourceMap - to be applied. If relative, it is relative to the SourceMap. - - This parameter is needed when the two SourceMaps aren't in the same - directory, and the SourceMap to be applied contains relative source - paths. If so, those relative source paths need to be rewritten - relative to the SourceMap. - - If omitted, it is assumed that both SourceMaps are in the same directory, - thus not needing any rewriting. (Supplying `'.'` has the same effect.) - -#### SourceMapGenerator.prototype.toString() - -Renders the source map being generated to a string. - -```js -generator.toString() -// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}' -``` - -### SourceNode - -SourceNodes provide a way to abstract over interpolating and/or concatenating -snippets of generated JavaScript source code, while maintaining the line and -column information associated between those snippets and the original source -code. This is useful as the final intermediate representation a compiler might -use before outputting the generated JS and source map. - -#### new SourceNode([line, column, source[, chunk[, name]]]) - -* `line`: The original line number associated with this source node, or null if - it isn't associated with an original line. The line number is 1-based. - -* `column`: The original column number associated with this source node, or null - if it isn't associated with an original column. The column number - is 0-based. - -* `source`: The original source's filename; null if no filename is provided. - -* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see - below. - -* `name`: Optional. The original identifier. - -```js -var node = new SourceNode(1, 2, "a.cpp", [ - new SourceNode(3, 4, "b.cpp", "extern int status;\n"), - new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"), - new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"), -]); -``` - -#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath]) - -Creates a SourceNode from generated code and a SourceMapConsumer. - -* `code`: The generated code - -* `sourceMapConsumer` The SourceMap for the generated code - -* `relativePath` The optional path that relative sources in `sourceMapConsumer` - should be relative to. - -```js -var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map", "utf8")); -var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"), - consumer); -``` - -#### SourceNode.prototype.add(chunk) - -Add a chunk of generated JS to this source node. - -* `chunk`: A string snippet of generated JS code, another instance of - `SourceNode`, or an array where each member is one of those things. - -```js -node.add(" + "); -node.add(otherNode); -node.add([leftHandOperandNode, " + ", rightHandOperandNode]); -``` - -#### SourceNode.prototype.prepend(chunk) - -Prepend a chunk of generated JS to this source node. - -* `chunk`: A string snippet of generated JS code, another instance of - `SourceNode`, or an array where each member is one of those things. - -```js -node.prepend("/** Build Id: f783haef86324gf **/\n\n"); -``` - -#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent) - -Set the source content for a source file. This will be added to the -`SourceMap` in the `sourcesContent` field. 
- -* `sourceFile`: The filename of the source file. - -* `sourceContent`: The content of the source file. - -```js -node.setSourceContent("module-one.scm", - fs.readFileSync("path/to/module-one.scm")) -``` - -#### SourceNode.prototype.walk(fn) - -Walk over the tree of JS snippets in this node and its children. The walking -function is called once for each snippet of JS and is passed that snippet and -its original associated source's line/column location. - -* `fn`: The traversal function. - -```js -var node = new SourceNode(1, 2, "a.js", [ - new SourceNode(3, 4, "b.js", "uno"), - "dos", - [ - "tres", - new SourceNode(5, 6, "c.js", "quatro") - ] -]); - -node.walk(function (code, loc) { console.log("WALK:", code, loc); }) -// WALK: uno { source: 'b.js', line: 3, column: 4, name: null } -// WALK: dos { source: 'a.js', line: 1, column: 2, name: null } -// WALK: tres { source: 'a.js', line: 1, column: 2, name: null } -// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null } -``` - -#### SourceNode.prototype.walkSourceContents(fn) - -Walk over the tree of SourceNodes. The walking function is called for each -source file content and is passed the filename and source content. - -* `fn`: The traversal function. - -```js -var a = new SourceNode(1, 2, "a.js", "generated from a"); -a.setSourceContent("a.js", "original a"); -var b = new SourceNode(1, 2, "b.js", "generated from b"); -b.setSourceContent("b.js", "original b"); -var c = new SourceNode(1, 2, "c.js", "generated from c"); -c.setSourceContent("c.js", "original c"); - -var node = new SourceNode(null, null, null, [a, b, c]); -node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); }) -// WALK: a.js : original a -// WALK: b.js : original b -// WALK: c.js : original c -``` - -#### SourceNode.prototype.join(sep) - -Like `Array.prototype.join` except for SourceNodes. Inserts the separator -between each of this source node's children. - -* `sep`: The separator. - -```js -var lhs = new SourceNode(1, 2, "a.rs", "my_copy"); -var operand = new SourceNode(3, 4, "a.rs", "="); -var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()"); - -var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]); -var joinedNode = node.join(" "); -``` - -#### SourceNode.prototype.replaceRight(pattern, replacement) - -Call `String.prototype.replace` on the very right-most source snippet. Useful -for trimming white space from the end of a source node, etc. - -* `pattern`: The pattern to replace. - -* `replacement`: The thing to replace the pattern with. - -```js -// Trim trailing white space. -node.replaceRight(/\s*$/, ""); -``` - -#### SourceNode.prototype.toString() - -Return the string representation of this source node. Walks over the tree and -concatenates all the various snippets together into one string. - -```js -var node = new SourceNode(1, 2, "a.js", [ - new SourceNode(3, 4, "b.js", "uno"), - "dos", - [ - "tres", - new SourceNode(5, 6, "c.js", "quatro") - ] -]); - -node.toString() -// 'unodostresquatro' -``` - -#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap]) - -Returns the string representation of this tree of source nodes, plus a -SourceMapGenerator which contains all the mappings between the generated and -original sources. - -The arguments are the same as those to `new SourceMapGenerator`.
- -```js -var node = new SourceNode(1, 2, "a.js", [ - new SourceNode(3, 4, "b.js", "uno"), - "dos", - [ - "tres", - new SourceNode(5, 6, "c.js", "quatro") - ] -]); - -node.toStringWithSourceMap({ file: "my-output-file.js" }) -// { code: 'unodostresquatro', -// map: [object SourceMapGenerator] } -``` diff --git a/node_modules/source-map-js/lib/array-set.js b/node_modules/source-map-js/lib/array-set.js deleted file mode 100644 index fbd5c81..0000000 --- a/node_modules/source-map-js/lib/array-set.js +++ /dev/null @@ -1,121 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -var util = require('./util'); -var has = Object.prototype.hasOwnProperty; -var hasNativeMap = typeof Map !== "undefined"; - -/** - * A data structure which is a combination of an array and a set. Adding a new - * member is O(1), testing for membership is O(1), and finding the index of an - * element is O(1). Removing elements from the set is not supported. Only - * strings are supported for membership. - */ -function ArraySet() { - this._array = []; - this._set = hasNativeMap ? new Map() : Object.create(null); -} - -/** - * Static method for creating ArraySet instances from an existing array. - */ -ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { - var set = new ArraySet(); - for (var i = 0, len = aArray.length; i < len; i++) { - set.add(aArray[i], aAllowDuplicates); - } - return set; -}; - -/** - * Return how many unique items are in this ArraySet. If duplicates have been - * added, than those do not count towards the size. - * - * @returns Number - */ -ArraySet.prototype.size = function ArraySet_size() { - return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; -}; - -/** - * Add the given string to this set. - * - * @param String aStr - */ -ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { - var sStr = hasNativeMap ? aStr : util.toSetString(aStr); - var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); - var idx = this._array.length; - if (!isDuplicate || aAllowDuplicates) { - this._array.push(aStr); - } - if (!isDuplicate) { - if (hasNativeMap) { - this._set.set(aStr, idx); - } else { - this._set[sStr] = idx; - } - } -}; - -/** - * Is the given string a member of this set? - * - * @param String aStr - */ -ArraySet.prototype.has = function ArraySet_has(aStr) { - if (hasNativeMap) { - return this._set.has(aStr); - } else { - var sStr = util.toSetString(aStr); - return has.call(this._set, sStr); - } -}; - -/** - * What is the index of the given string in the array? - * - * @param String aStr - */ -ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { - if (hasNativeMap) { - var idx = this._set.get(aStr); - if (idx >= 0) { - return idx; - } - } else { - var sStr = util.toSetString(aStr); - if (has.call(this._set, sStr)) { - return this._set[sStr]; - } - } - - throw new Error('"' + aStr + '" is not in the set.'); -}; - -/** - * What is the element at the given index? - * - * @param Number aIdx - */ -ArraySet.prototype.at = function ArraySet_at(aIdx) { - if (aIdx >= 0 && aIdx < this._array.length) { - return this._array[aIdx]; - } - throw new Error('No element indexed by ' + aIdx); -}; - -/** - * Returns the array representation of this set (which has the proper indices - * indicated by indexOf). 
Note that this is a copy of the internal array used - * for storing the members so that no one can mess with internal state. - */ -ArraySet.prototype.toArray = function ArraySet_toArray() { - return this._array.slice(); -}; - -exports.ArraySet = ArraySet; diff --git a/node_modules/source-map-js/lib/base64-vlq.js b/node_modules/source-map-js/lib/base64-vlq.js deleted file mode 100644 index 612b404..0000000 --- a/node_modules/source-map-js/lib/base64-vlq.js +++ /dev/null @@ -1,140 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - * - * Based on the Base 64 VLQ implementation in Closure Compiler: - * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java - * - * Copyright 2011 The Closure Compiler Authors. All rights reserved. - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are - * met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials provided - * with the distribution. - * * Neither the name of Google Inc. nor the names of its - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -var base64 = require('./base64'); - -// A single base 64 digit can contain 6 bits of data. For the base 64 variable -// length quantities we use in the source map spec, the first bit is the sign, -// the next four bits are the actual value, and the 6th bit is the -// continuation bit. The continuation bit tells us whether there are more -// digits in this value following this digit. -// -// Continuation -// | Sign -// | | -// V V -// 101011 - -var VLQ_BASE_SHIFT = 5; - -// binary: 100000 -var VLQ_BASE = 1 << VLQ_BASE_SHIFT; - -// binary: 011111 -var VLQ_BASE_MASK = VLQ_BASE - 1; - -// binary: 100000 -var VLQ_CONTINUATION_BIT = VLQ_BASE; - -/** - * Converts from a two-complement value to a value where the sign bit is - * placed in the least significant bit. For example, as decimals: - * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) - * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) - */ -function toVLQSigned(aValue) { - return aValue < 0 - ? 
((-aValue) << 1) + 1 - : (aValue << 1) + 0; -} - -/** - * Converts to a two-complement value from a value where the sign bit is - * placed in the least significant bit. For example, as decimals: - * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 - * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 - */ -function fromVLQSigned(aValue) { - var isNegative = (aValue & 1) === 1; - var shifted = aValue >> 1; - return isNegative - ? -shifted - : shifted; -} - -/** - * Returns the base 64 VLQ encoded value. - */ -exports.encode = function base64VLQ_encode(aValue) { - var encoded = ""; - var digit; - - var vlq = toVLQSigned(aValue); - - do { - digit = vlq & VLQ_BASE_MASK; - vlq >>>= VLQ_BASE_SHIFT; - if (vlq > 0) { - // There are still more digits in this value, so we must make sure the - // continuation bit is marked. - digit |= VLQ_CONTINUATION_BIT; - } - encoded += base64.encode(digit); - } while (vlq > 0); - - return encoded; -}; - -/** - * Decodes the next base 64 VLQ value from the given string and returns the - * value and the rest of the string via the out parameter. - */ -exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { - var strLen = aStr.length; - var result = 0; - var shift = 0; - var continuation, digit; - - do { - if (aIndex >= strLen) { - throw new Error("Expected more digits in base 64 VLQ value."); - } - - digit = base64.decode(aStr.charCodeAt(aIndex++)); - if (digit === -1) { - throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); - } - - continuation = !!(digit & VLQ_CONTINUATION_BIT); - digit &= VLQ_BASE_MASK; - result = result + (digit << shift); - shift += VLQ_BASE_SHIFT; - } while (continuation); - - aOutParam.value = fromVLQSigned(result); - aOutParam.rest = aIndex; -}; diff --git a/node_modules/source-map-js/lib/base64.js b/node_modules/source-map-js/lib/base64.js deleted file mode 100644 index 8aa86b3..0000000 --- a/node_modules/source-map-js/lib/base64.js +++ /dev/null @@ -1,67 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); - -/** - * Encode an integer in the range of 0 to 63 to a single base 64 digit. - */ -exports.encode = function (number) { - if (0 <= number && number < intToCharMap.length) { - return intToCharMap[number]; - } - throw new TypeError("Must be between 0 and 63: " + number); -}; - -/** - * Decode a single base 64 character code digit to an integer. Returns -1 on - * failure. - */ -exports.decode = function (charCode) { - var bigA = 65; // 'A' - var bigZ = 90; // 'Z' - - var littleA = 97; // 'a' - var littleZ = 122; // 'z' - - var zero = 48; // '0' - var nine = 57; // '9' - - var plus = 43; // '+' - var slash = 47; // '/' - - var littleOffset = 26; - var numberOffset = 52; - - // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ - if (bigA <= charCode && charCode <= bigZ) { - return (charCode - bigA); - } - - // 26 - 51: abcdefghijklmnopqrstuvwxyz - if (littleA <= charCode && charCode <= littleZ) { - return (charCode - littleA + littleOffset); - } - - // 52 - 61: 0123456789 - if (zero <= charCode && charCode <= nine) { - return (charCode - zero + numberOffset); - } - - // 62: + - if (charCode == plus) { - return 62; - } - - // 63: / - if (charCode == slash) { - return 63; - } - - // Invalid base64 digit. 
- return -1; -}; diff --git a/node_modules/source-map-js/lib/binary-search.js b/node_modules/source-map-js/lib/binary-search.js deleted file mode 100644 index 010ac94..0000000 --- a/node_modules/source-map-js/lib/binary-search.js +++ /dev/null @@ -1,111 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -exports.GREATEST_LOWER_BOUND = 1; -exports.LEAST_UPPER_BOUND = 2; - -/** - * Recursive implementation of binary search. - * - * @param aLow Indices here and lower do not contain the needle. - * @param aHigh Indices here and higher do not contain the needle. - * @param aNeedle The element being searched for. - * @param aHaystack The non-empty array being searched. - * @param aCompare Function which takes two elements and returns -1, 0, or 1. - * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or - * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the - * closest element that is smaller than or greater than the one we are - * searching for, respectively, if the exact element cannot be found. - */ -function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { - // This function terminates when one of the following is true: - // - // 1. We find the exact element we are looking for. - // - // 2. We did not find the exact element, but we can return the index of - // the next-closest element. - // - // 3. We did not find the exact element, and there is no next-closest - // element than the one we are searching for, so we return -1. - var mid = Math.floor((aHigh - aLow) / 2) + aLow; - var cmp = aCompare(aNeedle, aHaystack[mid], true); - if (cmp === 0) { - // Found the element we are looking for. - return mid; - } - else if (cmp > 0) { - // Our needle is greater than aHaystack[mid]. - if (aHigh - mid > 1) { - // The element is in the upper half. - return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); - } - - // The exact needle element was not found in this haystack. Determine if - // we are in termination case (3) or (2) and return the appropriate thing. - if (aBias == exports.LEAST_UPPER_BOUND) { - return aHigh < aHaystack.length ? aHigh : -1; - } else { - return mid; - } - } - else { - // Our needle is less than aHaystack[mid]. - if (mid - aLow > 1) { - // The element is in the lower half. - return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); - } - - // we are in termination case (3) or (2) and return the appropriate thing. - if (aBias == exports.LEAST_UPPER_BOUND) { - return mid; - } else { - return aLow < 0 ? -1 : aLow; - } - } -} - -/** - * This is an implementation of binary search which will always try and return - * the index of the closest element if there is no exact hit. This is because - * mappings between original and generated line/col pairs are single points, - * and there is an implicit region between each of them, so a miss just means - * that you aren't on the very start of a region. - * - * @param aNeedle The element you are looking for. - * @param aHaystack The array that is being searched. - * @param aCompare A function which takes the needle and an element in the - * array and returns -1, 0, or 1 depending on whether the needle is less - * than, equal to, or greater than the element, respectively. - * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or - * 'binarySearch.LEAST_UPPER_BOUND'. 
Specifies whether to return the - * closest element that is smaller than or greater than the one we are - * searching for, respectively, if the exact element cannot be found. - * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. - */ -exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { - if (aHaystack.length === 0) { - return -1; - } - - var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, - aCompare, aBias || exports.GREATEST_LOWER_BOUND); - if (index < 0) { - return -1; - } - - // We have found either the exact element, or the next-closest element than - // the one we are searching for. However, there may be more than one such - // element. Make sure we always return the smallest of these. - while (index - 1 >= 0) { - if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { - break; - } - --index; - } - - return index; -}; diff --git a/node_modules/source-map-js/lib/mapping-list.js b/node_modules/source-map-js/lib/mapping-list.js deleted file mode 100644 index 06d1274..0000000 --- a/node_modules/source-map-js/lib/mapping-list.js +++ /dev/null @@ -1,79 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2014 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -var util = require('./util'); - -/** - * Determine whether mappingB is after mappingA with respect to generated - * position. - */ -function generatedPositionAfter(mappingA, mappingB) { - // Optimized for most common case - var lineA = mappingA.generatedLine; - var lineB = mappingB.generatedLine; - var columnA = mappingA.generatedColumn; - var columnB = mappingB.generatedColumn; - return lineB > lineA || lineB == lineA && columnB >= columnA || - util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; -} - -/** - * A data structure to provide a sorted view of accumulated mappings in a - * performance conscious manner. It trades a neglibable overhead in general - * case for a large speedup in case of mappings being added in order. - */ -function MappingList() { - this._array = []; - this._sorted = true; - // Serves as infimum - this._last = {generatedLine: -1, generatedColumn: 0}; -} - -/** - * Iterate through internal items. This method takes the same arguments that - * `Array.prototype.forEach` takes. - * - * NOTE: The order of the mappings is NOT guaranteed. - */ -MappingList.prototype.unsortedForEach = - function MappingList_forEach(aCallback, aThisArg) { - this._array.forEach(aCallback, aThisArg); - }; - -/** - * Add the given source mapping. - * - * @param Object aMapping - */ -MappingList.prototype.add = function MappingList_add(aMapping) { - if (generatedPositionAfter(this._last, aMapping)) { - this._last = aMapping; - this._array.push(aMapping); - } else { - this._sorted = false; - this._array.push(aMapping); - } -}; - -/** - * Returns the flat, sorted array of mappings. The mappings are sorted by - * generated position. - * - * WARNING: This method returns internal data without copying, for - * performance. The return value must NOT be mutated, and should be treated as - * an immutable borrow. If you want to take ownership, you must make your own - * copy. 
- */ -MappingList.prototype.toArray = function MappingList_toArray() { - if (!this._sorted) { - this._array.sort(util.compareByGeneratedPositionsInflated); - this._sorted = true; - } - return this._array; -}; - -exports.MappingList = MappingList; diff --git a/node_modules/source-map-js/lib/quick-sort.js b/node_modules/source-map-js/lib/quick-sort.js deleted file mode 100644 index 23f9eda..0000000 --- a/node_modules/source-map-js/lib/quick-sort.js +++ /dev/null @@ -1,132 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -// It turns out that some (most?) JavaScript engines don't self-host -// `Array.prototype.sort`. This makes sense because C++ will likely remain -// faster than JS when doing raw CPU-intensive sorting. However, when using a -// custom comparator function, calling back and forth between the VM's C++ and -// JIT'd JS is rather slow *and* loses JIT type information, resulting in -// worse generated code for the comparator function than would be optimal. In -// fact, when sorting with a comparator, these costs outweigh the benefits of -// sorting in C++. By using our own JS-implemented Quick Sort (below), we get -// a ~3500ms mean speed-up in `bench/bench.html`. - -function SortTemplate(comparator) { - -/** - * Swap the elements indexed by `x` and `y` in the array `ary`. - * - * @param {Array} ary - * The array. - * @param {Number} x - * The index of the first item. - * @param {Number} y - * The index of the second item. - */ -function swap(ary, x, y) { - var temp = ary[x]; - ary[x] = ary[y]; - ary[y] = temp; -} - -/** - * Returns a random integer within the range `low .. high` inclusive. - * - * @param {Number} low - * The lower bound on the range. - * @param {Number} high - * The upper bound on the range. - */ -function randomIntInRange(low, high) { - return Math.round(low + (Math.random() * (high - low))); -} - -/** - * The Quick Sort algorithm. - * - * @param {Array} ary - * An array to sort. - * @param {function} comparator - * Function to use to compare two items. - * @param {Number} p - * Start index of the array - * @param {Number} r - * End index of the array - */ -function doQuickSort(ary, comparator, p, r) { - // If our lower bound is less than our upper bound, we (1) partition the - // array into two pieces and (2) recurse on each half. If it is not, this is - // the empty array and our base case. - - if (p < r) { - // (1) Partitioning. - // - // The partitioning chooses a pivot between `p` and `r` and moves all - // elements that are less than or equal to the pivot to the before it, and - // all the elements that are greater than it after it. The effect is that - // once partition is done, the pivot is in the exact place it will be when - // the array is put in sorted order, and it will not need to be moved - // again. This runs in O(n) time. - - // Always choose a random pivot so that an input array which is reverse - // sorted does not cause O(n^2) running time. - var pivotIndex = randomIntInRange(p, r); - var i = p - 1; - - swap(ary, pivotIndex, r); - var pivot = ary[r]; - - // Immediately after `j` is incremented in this loop, the following hold - // true: - // - // * Every element in `ary[p .. i]` is less than or equal to the pivot. - // - // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. 
- for (var j = p; j < r; j++) { - if (comparator(ary[j], pivot, false) <= 0) { - i += 1; - swap(ary, i, j); - } - } - - swap(ary, i + 1, j); - var q = i + 1; - - // (2) Recurse on each half. - - doQuickSort(ary, comparator, p, q - 1); - doQuickSort(ary, comparator, q + 1, r); - } -} - - return doQuickSort; -} - -function cloneSort(comparator) { - let template = SortTemplate.toString(); - let templateFn = new Function(`return ${template}`)(); - return templateFn(comparator); -} - -/** - * Sort the given array in-place with the given comparator function. - * - * @param {Array} ary - * An array to sort. - * @param {function} comparator - * Function to use to compare two items. - */ - -let sortCache = new WeakMap(); -exports.quickSort = function (ary, comparator, start = 0) { - let doQuickSort = sortCache.get(comparator); - if (doQuickSort === void 0) { - doQuickSort = cloneSort(comparator); - sortCache.set(comparator, doQuickSort); - } - doQuickSort(ary, comparator, start, ary.length - 1); -}; diff --git a/node_modules/source-map-js/lib/source-map-consumer.d.ts b/node_modules/source-map-js/lib/source-map-consumer.d.ts deleted file mode 100644 index 744bda7..0000000 --- a/node_modules/source-map-js/lib/source-map-consumer.d.ts +++ /dev/null @@ -1 +0,0 @@ -export { SourceMapConsumer } from '..'; diff --git a/node_modules/source-map-js/lib/source-map-consumer.js b/node_modules/source-map-js/lib/source-map-consumer.js deleted file mode 100644 index ee66114..0000000 --- a/node_modules/source-map-js/lib/source-map-consumer.js +++ /dev/null @@ -1,1188 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -var util = require('./util'); -var binarySearch = require('./binary-search'); -var ArraySet = require('./array-set').ArraySet; -var base64VLQ = require('./base64-vlq'); -var quickSort = require('./quick-sort').quickSort; - -function SourceMapConsumer(aSourceMap, aSourceMapURL) { - var sourceMap = aSourceMap; - if (typeof aSourceMap === 'string') { - sourceMap = util.parseSourceMapInput(aSourceMap); - } - - return sourceMap.sections != null - ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) - : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); -} - -SourceMapConsumer.fromSourceMap = function(aSourceMap, aSourceMapURL) { - return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); -} - -/** - * The version of the source mapping spec that we are consuming. - */ -SourceMapConsumer.prototype._version = 3; - -// `__generatedMappings` and `__originalMappings` are arrays that hold the -// parsed mapping coordinates from the source map's "mappings" attribute. They -// are lazily instantiated, accessed via the `_generatedMappings` and -// `_originalMappings` getters respectively, and we only parse the mappings -// and create these arrays once queried for a source location. We jump through -// these hoops because there can be many thousands of mappings, and parsing -// them is expensive, so we only want to do it if we must. 
-// -// Each object in the arrays is of the form: -// -// { -// generatedLine: The line number in the generated code, -// generatedColumn: The column number in the generated code, -// source: The path to the original source file that generated this -// chunk of code, -// originalLine: The line number in the original source that -// corresponds to this chunk of generated code, -// originalColumn: The column number in the original source that -// corresponds to this chunk of generated code, -// name: The name of the original symbol which generated this chunk of -// code. -// } -// -// All properties except for `generatedLine` and `generatedColumn` can be -// `null`. -// -// `_generatedMappings` is ordered by the generated positions. -// -// `_originalMappings` is ordered by the original positions. - -SourceMapConsumer.prototype.__generatedMappings = null; -Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { - configurable: true, - enumerable: true, - get: function () { - if (!this.__generatedMappings) { - this._parseMappings(this._mappings, this.sourceRoot); - } - - return this.__generatedMappings; - } -}); - -SourceMapConsumer.prototype.__originalMappings = null; -Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { - configurable: true, - enumerable: true, - get: function () { - if (!this.__originalMappings) { - this._parseMappings(this._mappings, this.sourceRoot); - } - - return this.__originalMappings; - } -}); - -SourceMapConsumer.prototype._charIsMappingSeparator = - function SourceMapConsumer_charIsMappingSeparator(aStr, index) { - var c = aStr.charAt(index); - return c === ";" || c === ","; - }; - -/** - * Parse the mappings in a string in to a data structure which we can easily - * query (the ordered arrays in the `this.__generatedMappings` and - * `this.__originalMappings` properties). - */ -SourceMapConsumer.prototype._parseMappings = - function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { - throw new Error("Subclasses must implement _parseMappings"); - }; - -SourceMapConsumer.GENERATED_ORDER = 1; -SourceMapConsumer.ORIGINAL_ORDER = 2; - -SourceMapConsumer.GREATEST_LOWER_BOUND = 1; -SourceMapConsumer.LEAST_UPPER_BOUND = 2; - -/** - * Iterate over each mapping between an original source/line/column and a - * generated line/column in this source map. - * - * @param Function aCallback - * The function that is called with each mapping. - * @param Object aContext - * Optional. If specified, this object will be the value of `this` every - * time that `aCallback` is called. - * @param aOrder - * Either `SourceMapConsumer.GENERATED_ORDER` or - * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to - * iterate over the mappings sorted by the generated file's line/column - * order or the original's source/line/column order, respectively. Defaults to - * `SourceMapConsumer.GENERATED_ORDER`. 
- */ -SourceMapConsumer.prototype.eachMapping = - function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { - var context = aContext || null; - var order = aOrder || SourceMapConsumer.GENERATED_ORDER; - - var mappings; - switch (order) { - case SourceMapConsumer.GENERATED_ORDER: - mappings = this._generatedMappings; - break; - case SourceMapConsumer.ORIGINAL_ORDER: - mappings = this._originalMappings; - break; - default: - throw new Error("Unknown order of iteration."); - } - - var sourceRoot = this.sourceRoot; - var boundCallback = aCallback.bind(context); - var names = this._names; - var sources = this._sources; - var sourceMapURL = this._sourceMapURL; - - for (var i = 0, n = mappings.length; i < n; i++) { - var mapping = mappings[i]; - var source = mapping.source === null ? null : sources.at(mapping.source); - if(source !== null) { - source = util.computeSourceURL(sourceRoot, source, sourceMapURL); - } - boundCallback({ - source: source, - generatedLine: mapping.generatedLine, - generatedColumn: mapping.generatedColumn, - originalLine: mapping.originalLine, - originalColumn: mapping.originalColumn, - name: mapping.name === null ? null : names.at(mapping.name) - }); - } - }; - -/** - * Returns all generated line and column information for the original source, - * line, and column provided. If no column is provided, returns all mappings - * corresponding to a either the line we are searching for or the next - * closest line that has any mappings. Otherwise, returns all mappings - * corresponding to the given line and either the column we are searching for - * or the next closest column that has any offsets. - * - * The only argument is an object with the following properties: - * - * - source: The filename of the original source. - * - line: The line number in the original source. The line number is 1-based. - * - column: Optional. the column number in the original source. - * The column number is 0-based. - * - * and an array of objects is returned, each with the following properties: - * - * - line: The line number in the generated source, or null. The - * line number is 1-based. - * - column: The column number in the generated source, or null. - * The column number is 0-based. - */ -SourceMapConsumer.prototype.allGeneratedPositionsFor = - function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { - var line = util.getArg(aArgs, 'line'); - - // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping - // returns the index of the closest mapping less than the needle. By - // setting needle.originalColumn to 0, we thus find the last mapping for - // the given line, provided such a mapping exists. - var needle = { - source: util.getArg(aArgs, 'source'), - originalLine: line, - originalColumn: util.getArg(aArgs, 'column', 0) - }; - - needle.source = this._findSourceIndex(needle.source); - if (needle.source < 0) { - return []; - } - - var mappings = []; - - var index = this._findMapping(needle, - this._originalMappings, - "originalLine", - "originalColumn", - util.compareByOriginalPositions, - binarySearch.LEAST_UPPER_BOUND); - if (index >= 0) { - var mapping = this._originalMappings[index]; - - if (aArgs.column === undefined) { - var originalLine = mapping.originalLine; - - // Iterate until either we run out of mappings, or we run into - // a mapping for a different line than the one we found. Since - // mappings are sorted, this is guaranteed to find all mappings for - // the line we found. 
- while (mapping && mapping.originalLine === originalLine) { - mappings.push({ - line: util.getArg(mapping, 'generatedLine', null), - column: util.getArg(mapping, 'generatedColumn', null), - lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) - }); - - mapping = this._originalMappings[++index]; - } - } else { - var originalColumn = mapping.originalColumn; - - // Iterate until either we run out of mappings, or we run into - // a mapping for a different line than the one we were searching for. - // Since mappings are sorted, this is guaranteed to find all mappings for - // the line we are searching for. - while (mapping && - mapping.originalLine === line && - mapping.originalColumn == originalColumn) { - mappings.push({ - line: util.getArg(mapping, 'generatedLine', null), - column: util.getArg(mapping, 'generatedColumn', null), - lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) - }); - - mapping = this._originalMappings[++index]; - } - } - } - - return mappings; - }; - -exports.SourceMapConsumer = SourceMapConsumer; - -/** - * A BasicSourceMapConsumer instance represents a parsed source map which we can - * query for information about the original file positions by giving it a file - * position in the generated source. - * - * The first parameter is the raw source map (either as a JSON string, or - * already parsed to an object). According to the spec, source maps have the - * following attributes: - * - * - version: Which version of the source map spec this map is following. - * - sources: An array of URLs to the original source files. - * - names: An array of identifiers which can be referrenced by individual mappings. - * - sourceRoot: Optional. The URL root from which all sources are relative. - * - sourcesContent: Optional. An array of contents of the original source files. - * - mappings: A string of base64 VLQs which contain the actual mappings. - * - file: Optional. The generated file this source map is associated with. - * - * Here is an example source map, taken from the source map spec[0]: - * - * { - * version : 3, - * file: "out.js", - * sourceRoot : "", - * sources: ["foo.js", "bar.js"], - * names: ["src", "maps", "are", "fun"], - * mappings: "AA,AB;;ABCDE;" - * } - * - * The second parameter, if given, is a string whose value is the URL - * at which the source map was found. This URL is used to compute the - * sources array. - * - * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# - */ -function BasicSourceMapConsumer(aSourceMap, aSourceMapURL) { - var sourceMap = aSourceMap; - if (typeof aSourceMap === 'string') { - sourceMap = util.parseSourceMapInput(aSourceMap); - } - - var version = util.getArg(sourceMap, 'version'); - var sources = util.getArg(sourceMap, 'sources'); - // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which - // requires the array) to play nice here. - var names = util.getArg(sourceMap, 'names', []); - var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); - var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); - var mappings = util.getArg(sourceMap, 'mappings'); - var file = util.getArg(sourceMap, 'file', null); - - // Once again, Sass deviates from the spec and supplies the version as a - // string rather than a number, so we use loose equality checking here. 
- if (version != this._version) { - throw new Error('Unsupported version: ' + version); - } - - if (sourceRoot) { - sourceRoot = util.normalize(sourceRoot); - } - - sources = sources - .map(String) - // Some source maps produce relative source paths like "./foo.js" instead of - // "foo.js". Normalize these first so that future comparisons will succeed. - // See bugzil.la/1090768. - .map(util.normalize) - // Always ensure that absolute sources are internally stored relative to - // the source root, if the source root is absolute. Not doing this would - // be particularly problematic when the source root is a prefix of the - // source (valid, but why??). See github issue #199 and bugzil.la/1188982. - .map(function (source) { - return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) - ? util.relative(sourceRoot, source) - : source; - }); - - // Pass `true` below to allow duplicate names and sources. While source maps - // are intended to be compressed and deduplicated, the TypeScript compiler - // sometimes generates source maps with duplicates in them. See Github issue - // #72 and bugzil.la/889492. - this._names = ArraySet.fromArray(names.map(String), true); - this._sources = ArraySet.fromArray(sources, true); - - this._absoluteSources = this._sources.toArray().map(function (s) { - return util.computeSourceURL(sourceRoot, s, aSourceMapURL); - }); - - this.sourceRoot = sourceRoot; - this.sourcesContent = sourcesContent; - this._mappings = mappings; - this._sourceMapURL = aSourceMapURL; - this.file = file; -} - -BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); -BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; - -/** - * Utility function to find the index of a source. Returns -1 if not - * found. - */ -BasicSourceMapConsumer.prototype._findSourceIndex = function(aSource) { - var relativeSource = aSource; - if (this.sourceRoot != null) { - relativeSource = util.relative(this.sourceRoot, relativeSource); - } - - if (this._sources.has(relativeSource)) { - return this._sources.indexOf(relativeSource); - } - - // Maybe aSource is an absolute URL as returned by |sources|. In - // this case we can't simply undo the transform. - var i; - for (i = 0; i < this._absoluteSources.length; ++i) { - if (this._absoluteSources[i] == aSource) { - return i; - } - } - - return -1; -}; - -/** - * Create a BasicSourceMapConsumer from a SourceMapGenerator. - * - * @param SourceMapGenerator aSourceMap - * The source map that will be consumed. 
- * @param String aSourceMapURL - * The URL at which the source map can be found (optional) - * @returns BasicSourceMapConsumer - */ -BasicSourceMapConsumer.fromSourceMap = - function SourceMapConsumer_fromSourceMap(aSourceMap, aSourceMapURL) { - var smc = Object.create(BasicSourceMapConsumer.prototype); - - var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); - var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); - smc.sourceRoot = aSourceMap._sourceRoot; - smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), - smc.sourceRoot); - smc.file = aSourceMap._file; - smc._sourceMapURL = aSourceMapURL; - smc._absoluteSources = smc._sources.toArray().map(function (s) { - return util.computeSourceURL(smc.sourceRoot, s, aSourceMapURL); - }); - - // Because we are modifying the entries (by converting string sources and - // names to indices into the sources and names ArraySets), we have to make - // a copy of the entry or else bad things happen. Shared mutable state - // strikes again! See github issue #191. - - var generatedMappings = aSourceMap._mappings.toArray().slice(); - var destGeneratedMappings = smc.__generatedMappings = []; - var destOriginalMappings = smc.__originalMappings = []; - - for (var i = 0, length = generatedMappings.length; i < length; i++) { - var srcMapping = generatedMappings[i]; - var destMapping = new Mapping; - destMapping.generatedLine = srcMapping.generatedLine; - destMapping.generatedColumn = srcMapping.generatedColumn; - - if (srcMapping.source) { - destMapping.source = sources.indexOf(srcMapping.source); - destMapping.originalLine = srcMapping.originalLine; - destMapping.originalColumn = srcMapping.originalColumn; - - if (srcMapping.name) { - destMapping.name = names.indexOf(srcMapping.name); - } - - destOriginalMappings.push(destMapping); - } - - destGeneratedMappings.push(destMapping); - } - - quickSort(smc.__originalMappings, util.compareByOriginalPositions); - - return smc; - }; - -/** - * The version of the source mapping spec that we are consuming. - */ -BasicSourceMapConsumer.prototype._version = 3; - -/** - * The list of original sources. - */ -Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { - get: function () { - return this._absoluteSources.slice(); - } -}); - -/** - * Provide the JIT with a nice shape / hidden class. - */ -function Mapping() { - this.generatedLine = 0; - this.generatedColumn = 0; - this.source = null; - this.originalLine = null; - this.originalColumn = null; - this.name = null; -} - -/** - * Parse the mappings in a string in to a data structure which we can easily - * query (the ordered arrays in the `this.__generatedMappings` and - * `this.__originalMappings` properties). 
- */ - -const compareGenerated = util.compareByGeneratedPositionsDeflatedNoLine; -function sortGenerated(array, start) { - let l = array.length; - let n = array.length - start; - if (n <= 1) { - return; - } else if (n == 2) { - let a = array[start]; - let b = array[start + 1]; - if (compareGenerated(a, b) > 0) { - array[start] = b; - array[start + 1] = a; - } - } else if (n < 20) { - for (let i = start; i < l; i++) { - for (let j = i; j > start; j--) { - let a = array[j - 1]; - let b = array[j]; - if (compareGenerated(a, b) <= 0) { - break; - } - array[j - 1] = b; - array[j] = a; - } - } - } else { - quickSort(array, compareGenerated, start); - } -} -BasicSourceMapConsumer.prototype._parseMappings = - function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { - var generatedLine = 1; - var previousGeneratedColumn = 0; - var previousOriginalLine = 0; - var previousOriginalColumn = 0; - var previousSource = 0; - var previousName = 0; - var length = aStr.length; - var index = 0; - var cachedSegments = {}; - var temp = {}; - var originalMappings = []; - var generatedMappings = []; - var mapping, str, segment, end, value; - - let subarrayStart = 0; - while (index < length) { - if (aStr.charAt(index) === ';') { - generatedLine++; - index++; - previousGeneratedColumn = 0; - - sortGenerated(generatedMappings, subarrayStart); - subarrayStart = generatedMappings.length; - } - else if (aStr.charAt(index) === ',') { - index++; - } - else { - mapping = new Mapping(); - mapping.generatedLine = generatedLine; - - for (end = index; end < length; end++) { - if (this._charIsMappingSeparator(aStr, end)) { - break; - } - } - str = aStr.slice(index, end); - - segment = []; - while (index < end) { - base64VLQ.decode(aStr, index, temp); - value = temp.value; - index = temp.rest; - segment.push(value); - } - - if (segment.length === 2) { - throw new Error('Found a source, but no line and column'); - } - - if (segment.length === 3) { - throw new Error('Found a source and line, but no column'); - } - - // Generated column. - mapping.generatedColumn = previousGeneratedColumn + segment[0]; - previousGeneratedColumn = mapping.generatedColumn; - - if (segment.length > 1) { - // Original source. - mapping.source = previousSource + segment[1]; - previousSource += segment[1]; - - // Original line. - mapping.originalLine = previousOriginalLine + segment[2]; - previousOriginalLine = mapping.originalLine; - // Lines are stored 0-based - mapping.originalLine += 1; - - // Original column. - mapping.originalColumn = previousOriginalColumn + segment[3]; - previousOriginalColumn = mapping.originalColumn; - - if (segment.length > 4) { - // Original name. 
- mapping.name = previousName + segment[4]; - previousName += segment[4]; - } - } - - generatedMappings.push(mapping); - if (typeof mapping.originalLine === 'number') { - let currentSource = mapping.source; - while (originalMappings.length <= currentSource) { - originalMappings.push(null); - } - if (originalMappings[currentSource] === null) { - originalMappings[currentSource] = []; - } - originalMappings[currentSource].push(mapping); - } - } - } - - sortGenerated(generatedMappings, subarrayStart); - this.__generatedMappings = generatedMappings; - - for (var i = 0; i < originalMappings.length; i++) { - if (originalMappings[i] != null) { - quickSort(originalMappings[i], util.compareByOriginalPositionsNoSource); - } - } - this.__originalMappings = [].concat(...originalMappings); - }; - -/** - * Find the mapping that best matches the hypothetical "needle" mapping that - * we are searching for in the given "haystack" of mappings. - */ -BasicSourceMapConsumer.prototype._findMapping = - function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, - aColumnName, aComparator, aBias) { - // To return the position we are searching for, we must first find the - // mapping for the given position and then return the opposite position it - // points to. Because the mappings are sorted, we can use binary search to - // find the best mapping. - - if (aNeedle[aLineName] <= 0) { - throw new TypeError('Line must be greater than or equal to 1, got ' - + aNeedle[aLineName]); - } - if (aNeedle[aColumnName] < 0) { - throw new TypeError('Column must be greater than or equal to 0, got ' - + aNeedle[aColumnName]); - } - - return binarySearch.search(aNeedle, aMappings, aComparator, aBias); - }; - -/** - * Compute the last column for each generated mapping. The last column is - * inclusive. - */ -BasicSourceMapConsumer.prototype.computeColumnSpans = - function SourceMapConsumer_computeColumnSpans() { - for (var index = 0; index < this._generatedMappings.length; ++index) { - var mapping = this._generatedMappings[index]; - - // Mappings do not contain a field for the last generated columnt. We - // can come up with an optimistic estimate, however, by assuming that - // mappings are contiguous (i.e. given two consecutive mappings, the - // first mapping ends where the second one starts). - if (index + 1 < this._generatedMappings.length) { - var nextMapping = this._generatedMappings[index + 1]; - - if (mapping.generatedLine === nextMapping.generatedLine) { - mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; - continue; - } - } - - // The last mapping for each line spans the entire line. - mapping.lastGeneratedColumn = Infinity; - } - }; - -/** - * Returns the original source, line, and column information for the generated - * source's line and column positions provided. The only argument is an object - * with the following properties: - * - * - line: The line number in the generated source. The line number - * is 1-based. - * - column: The column number in the generated source. The column - * number is 0-based. - * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or - * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the - * closest element that is smaller than or greater than the one we are - * searching for, respectively, if the exact element cannot be found. - * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. - * - * and an object is returned with the following properties: - * - * - source: The original source file, or null. 
- * - line: The line number in the original source, or null. The - * line number is 1-based. - * - column: The column number in the original source, or null. The - * column number is 0-based. - * - name: The original identifier, or null. - */ -BasicSourceMapConsumer.prototype.originalPositionFor = - function SourceMapConsumer_originalPositionFor(aArgs) { - var needle = { - generatedLine: util.getArg(aArgs, 'line'), - generatedColumn: util.getArg(aArgs, 'column') - }; - - var index = this._findMapping( - needle, - this._generatedMappings, - "generatedLine", - "generatedColumn", - util.compareByGeneratedPositionsDeflated, - util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) - ); - - if (index >= 0) { - var mapping = this._generatedMappings[index]; - - if (mapping.generatedLine === needle.generatedLine) { - var source = util.getArg(mapping, 'source', null); - if (source !== null) { - source = this._sources.at(source); - source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); - } - var name = util.getArg(mapping, 'name', null); - if (name !== null) { - name = this._names.at(name); - } - return { - source: source, - line: util.getArg(mapping, 'originalLine', null), - column: util.getArg(mapping, 'originalColumn', null), - name: name - }; - } - } - - return { - source: null, - line: null, - column: null, - name: null - }; - }; - -/** - * Return true if we have the source content for every source in the source - * map, false otherwise. - */ -BasicSourceMapConsumer.prototype.hasContentsOfAllSources = - function BasicSourceMapConsumer_hasContentsOfAllSources() { - if (!this.sourcesContent) { - return false; - } - return this.sourcesContent.length >= this._sources.size() && - !this.sourcesContent.some(function (sc) { return sc == null; }); - }; - -/** - * Returns the original source content. The only argument is the url of the - * original source file. Returns null if no original source content is - * available. - */ -BasicSourceMapConsumer.prototype.sourceContentFor = - function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { - if (!this.sourcesContent) { - return null; - } - - var index = this._findSourceIndex(aSource); - if (index >= 0) { - return this.sourcesContent[index]; - } - - var relativeSource = aSource; - if (this.sourceRoot != null) { - relativeSource = util.relative(this.sourceRoot, relativeSource); - } - - var url; - if (this.sourceRoot != null - && (url = util.urlParse(this.sourceRoot))) { - // XXX: file:// URIs and absolute paths lead to unexpected behavior for - // many users. We can help them out when they expect file:// URIs to - // behave like it would if they were running a local HTTP server. See - // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. - var fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); - if (url.scheme == "file" - && this._sources.has(fileUriAbsPath)) { - return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] - } - - if ((!url.path || url.path == "/") - && this._sources.has("/" + relativeSource)) { - return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; - } - } - - // This function is used recursively from - // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we - // don't want to throw if we can't find the source - we just want to - // return null, so we provide a flag to exit gracefully. 
- if (nullOnMissing) { - return null; - } - else { - throw new Error('"' + relativeSource + '" is not in the SourceMap.'); - } - }; - -/** - * Returns the generated line and column information for the original source, - * line, and column positions provided. The only argument is an object with - * the following properties: - * - * - source: The filename of the original source. - * - line: The line number in the original source. The line number - * is 1-based. - * - column: The column number in the original source. The column - * number is 0-based. - * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or - * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the - * closest element that is smaller than or greater than the one we are - * searching for, respectively, if the exact element cannot be found. - * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. - * - * and an object is returned with the following properties: - * - * - line: The line number in the generated source, or null. The - * line number is 1-based. - * - column: The column number in the generated source, or null. - * The column number is 0-based. - */ -BasicSourceMapConsumer.prototype.generatedPositionFor = - function SourceMapConsumer_generatedPositionFor(aArgs) { - var source = util.getArg(aArgs, 'source'); - source = this._findSourceIndex(source); - if (source < 0) { - return { - line: null, - column: null, - lastColumn: null - }; - } - - var needle = { - source: source, - originalLine: util.getArg(aArgs, 'line'), - originalColumn: util.getArg(aArgs, 'column') - }; - - var index = this._findMapping( - needle, - this._originalMappings, - "originalLine", - "originalColumn", - util.compareByOriginalPositions, - util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) - ); - - if (index >= 0) { - var mapping = this._originalMappings[index]; - - if (mapping.source === needle.source) { - return { - line: util.getArg(mapping, 'generatedLine', null), - column: util.getArg(mapping, 'generatedColumn', null), - lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) - }; - } - } - - return { - line: null, - column: null, - lastColumn: null - }; - }; - -exports.BasicSourceMapConsumer = BasicSourceMapConsumer; - -/** - * An IndexedSourceMapConsumer instance represents a parsed source map which - * we can query for information. It differs from BasicSourceMapConsumer in - * that it takes "indexed" source maps (i.e. ones with a "sections" field) as - * input. - * - * The first parameter is a raw source map (either as a JSON string, or already - * parsed to an object). According to the spec for indexed source maps, they - * have the following attributes: - * - * - version: Which version of the source map spec this map is following. - * - file: Optional. The generated file this source map is associated with. - * - sections: A list of section definitions. - * - * Each value under the "sections" field has two fields: - * - offset: The offset into the original specified at which this section - * begins to apply, defined as an object with a "line" and "column" - * field. - * - map: A source map definition. This source map could also be indexed, - * but doesn't have to be. - * - * Instead of the "map" field, it's also possible to have a "url" field - * specifying a URL to retrieve a source map from, but that's currently - * unsupported. - * - * Here's an example source map, taken from the source map spec[0], but - * modified to omit a section which uses the "url" field. 
- * - * { - * version : 3, - * file: "app.js", - * sections: [{ - * offset: {line:100, column:10}, - * map: { - * version : 3, - * file: "section.js", - * sources: ["foo.js", "bar.js"], - * names: ["src", "maps", "are", "fun"], - * mappings: "AAAA,E;;ABCDE;" - * } - * }], - * } - * - * The second parameter, if given, is a string whose value is the URL - * at which the source map was found. This URL is used to compute the - * sources array. - * - * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt - */ -function IndexedSourceMapConsumer(aSourceMap, aSourceMapURL) { - var sourceMap = aSourceMap; - if (typeof aSourceMap === 'string') { - sourceMap = util.parseSourceMapInput(aSourceMap); - } - - var version = util.getArg(sourceMap, 'version'); - var sections = util.getArg(sourceMap, 'sections'); - - if (version != this._version) { - throw new Error('Unsupported version: ' + version); - } - - this._sources = new ArraySet(); - this._names = new ArraySet(); - - var lastOffset = { - line: -1, - column: 0 - }; - this._sections = sections.map(function (s) { - if (s.url) { - // The url field will require support for asynchronicity. - // See https://github.com/mozilla/source-map/issues/16 - throw new Error('Support for url field in sections not implemented.'); - } - var offset = util.getArg(s, 'offset'); - var offsetLine = util.getArg(offset, 'line'); - var offsetColumn = util.getArg(offset, 'column'); - - if (offsetLine < lastOffset.line || - (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { - throw new Error('Section offsets must be ordered and non-overlapping.'); - } - lastOffset = offset; - - return { - generatedOffset: { - // The offset fields are 0-based, but we use 1-based indices when - // encoding/decoding from VLQ. - generatedLine: offsetLine + 1, - generatedColumn: offsetColumn + 1 - }, - consumer: new SourceMapConsumer(util.getArg(s, 'map'), aSourceMapURL) - } - }); -} - -IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); -IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; - -/** - * The version of the source mapping spec that we are consuming. - */ -IndexedSourceMapConsumer.prototype._version = 3; - -/** - * The list of original sources. - */ -Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { - get: function () { - var sources = []; - for (var i = 0; i < this._sections.length; i++) { - for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { - sources.push(this._sections[i].consumer.sources[j]); - } - } - return sources; - } -}); - -/** - * Returns the original source, line, and column information for the generated - * source's line and column positions provided. The only argument is an object - * with the following properties: - * - * - line: The line number in the generated source. The line number - * is 1-based. - * - column: The column number in the generated source. The column - * number is 0-based. - * - * and an object is returned with the following properties: - * - * - source: The original source file, or null. - * - line: The line number in the original source, or null. The - * line number is 1-based. - * - column: The column number in the original source, or null. The - * column number is 0-based. - * - name: The original identifier, or null. 
- */ -IndexedSourceMapConsumer.prototype.originalPositionFor = - function IndexedSourceMapConsumer_originalPositionFor(aArgs) { - var needle = { - generatedLine: util.getArg(aArgs, 'line'), - generatedColumn: util.getArg(aArgs, 'column') - }; - - // Find the section containing the generated position we're trying to map - // to an original position. - var sectionIndex = binarySearch.search(needle, this._sections, - function(needle, section) { - var cmp = needle.generatedLine - section.generatedOffset.generatedLine; - if (cmp) { - return cmp; - } - - return (needle.generatedColumn - - section.generatedOffset.generatedColumn); - }); - var section = this._sections[sectionIndex]; - - if (!section) { - return { - source: null, - line: null, - column: null, - name: null - }; - } - - return section.consumer.originalPositionFor({ - line: needle.generatedLine - - (section.generatedOffset.generatedLine - 1), - column: needle.generatedColumn - - (section.generatedOffset.generatedLine === needle.generatedLine - ? section.generatedOffset.generatedColumn - 1 - : 0), - bias: aArgs.bias - }); - }; - -/** - * Return true if we have the source content for every source in the source - * map, false otherwise. - */ -IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = - function IndexedSourceMapConsumer_hasContentsOfAllSources() { - return this._sections.every(function (s) { - return s.consumer.hasContentsOfAllSources(); - }); - }; - -/** - * Returns the original source content. The only argument is the url of the - * original source file. Returns null if no original source content is - * available. - */ -IndexedSourceMapConsumer.prototype.sourceContentFor = - function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { - for (var i = 0; i < this._sections.length; i++) { - var section = this._sections[i]; - - var content = section.consumer.sourceContentFor(aSource, true); - if (content || content === '') { - return content; - } - } - if (nullOnMissing) { - return null; - } - else { - throw new Error('"' + aSource + '" is not in the SourceMap.'); - } - }; - -/** - * Returns the generated line and column information for the original source, - * line, and column positions provided. The only argument is an object with - * the following properties: - * - * - source: The filename of the original source. - * - line: The line number in the original source. The line number - * is 1-based. - * - column: The column number in the original source. The column - * number is 0-based. - * - * and an object is returned with the following properties: - * - * - line: The line number in the generated source, or null. The - * line number is 1-based. - * - column: The column number in the generated source, or null. - * The column number is 0-based. - */ -IndexedSourceMapConsumer.prototype.generatedPositionFor = - function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { - for (var i = 0; i < this._sections.length; i++) { - var section = this._sections[i]; - - // Only consider this section if the requested source is in the list of - // sources of the consumer. - if (section.consumer._findSourceIndex(util.getArg(aArgs, 'source')) === -1) { - continue; - } - var generatedPosition = section.consumer.generatedPositionFor(aArgs); - if (generatedPosition) { - var ret = { - line: generatedPosition.line + - (section.generatedOffset.generatedLine - 1), - column: generatedPosition.column + - (section.generatedOffset.generatedLine === generatedPosition.line - ? 
section.generatedOffset.generatedColumn - 1 - : 0) - }; - return ret; - } - } - - return { - line: null, - column: null - }; - }; - -/** - * Parse the mappings in a string in to a data structure which we can easily - * query (the ordered arrays in the `this.__generatedMappings` and - * `this.__originalMappings` properties). - */ -IndexedSourceMapConsumer.prototype._parseMappings = - function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { - this.__generatedMappings = []; - this.__originalMappings = []; - for (var i = 0; i < this._sections.length; i++) { - var section = this._sections[i]; - var sectionMappings = section.consumer._generatedMappings; - for (var j = 0; j < sectionMappings.length; j++) { - var mapping = sectionMappings[j]; - - var source = section.consumer._sources.at(mapping.source); - if(source !== null) { - source = util.computeSourceURL(section.consumer.sourceRoot, source, this._sourceMapURL); - } - this._sources.add(source); - source = this._sources.indexOf(source); - - var name = null; - if (mapping.name) { - name = section.consumer._names.at(mapping.name); - this._names.add(name); - name = this._names.indexOf(name); - } - - // The mappings coming from the consumer for the section have - // generated positions relative to the start of the section, so we - // need to offset them to be relative to the start of the concatenated - // generated file. - var adjustedMapping = { - source: source, - generatedLine: mapping.generatedLine + - (section.generatedOffset.generatedLine - 1), - generatedColumn: mapping.generatedColumn + - (section.generatedOffset.generatedLine === mapping.generatedLine - ? section.generatedOffset.generatedColumn - 1 - : 0), - originalLine: mapping.originalLine, - originalColumn: mapping.originalColumn, - name: name - }; - - this.__generatedMappings.push(adjustedMapping); - if (typeof adjustedMapping.originalLine === 'number') { - this.__originalMappings.push(adjustedMapping); - } - } - } - - quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); - quickSort(this.__originalMappings, util.compareByOriginalPositions); - }; - -exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; diff --git a/node_modules/source-map-js/lib/source-map-generator.d.ts b/node_modules/source-map-js/lib/source-map-generator.d.ts deleted file mode 100644 index f59d70a..0000000 --- a/node_modules/source-map-js/lib/source-map-generator.d.ts +++ /dev/null @@ -1 +0,0 @@ -export { SourceMapGenerator } from '..'; diff --git a/node_modules/source-map-js/lib/source-map-generator.js b/node_modules/source-map-js/lib/source-map-generator.js deleted file mode 100644 index bab04ff..0000000 --- a/node_modules/source-map-js/lib/source-map-generator.js +++ /dev/null @@ -1,444 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -var base64VLQ = require('./base64-vlq'); -var util = require('./util'); -var ArraySet = require('./array-set').ArraySet; -var MappingList = require('./mapping-list').MappingList; - -/** - * An instance of the SourceMapGenerator represents a source map which is - * being built incrementally. You may pass an object with the following - * properties: - * - * - file: The filename of the generated source. - * - sourceRoot: A root for all relative URLs in this source map. 
- */ -function SourceMapGenerator(aArgs) { - if (!aArgs) { - aArgs = {}; - } - this._file = util.getArg(aArgs, 'file', null); - this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); - this._skipValidation = util.getArg(aArgs, 'skipValidation', false); - this._ignoreInvalidMapping = util.getArg(aArgs, 'ignoreInvalidMapping', false); - this._sources = new ArraySet(); - this._names = new ArraySet(); - this._mappings = new MappingList(); - this._sourcesContents = null; -} - -SourceMapGenerator.prototype._version = 3; - -/** - * Creates a new SourceMapGenerator based on a SourceMapConsumer - * - * @param aSourceMapConsumer The SourceMap. - */ -SourceMapGenerator.fromSourceMap = - function SourceMapGenerator_fromSourceMap(aSourceMapConsumer, generatorOps) { - var sourceRoot = aSourceMapConsumer.sourceRoot; - var generator = new SourceMapGenerator(Object.assign(generatorOps || {}, { - file: aSourceMapConsumer.file, - sourceRoot: sourceRoot - })); - aSourceMapConsumer.eachMapping(function (mapping) { - var newMapping = { - generated: { - line: mapping.generatedLine, - column: mapping.generatedColumn - } - }; - - if (mapping.source != null) { - newMapping.source = mapping.source; - if (sourceRoot != null) { - newMapping.source = util.relative(sourceRoot, newMapping.source); - } - - newMapping.original = { - line: mapping.originalLine, - column: mapping.originalColumn - }; - - if (mapping.name != null) { - newMapping.name = mapping.name; - } - } - - generator.addMapping(newMapping); - }); - aSourceMapConsumer.sources.forEach(function (sourceFile) { - var sourceRelative = sourceFile; - if (sourceRoot !== null) { - sourceRelative = util.relative(sourceRoot, sourceFile); - } - - if (!generator._sources.has(sourceRelative)) { - generator._sources.add(sourceRelative); - } - - var content = aSourceMapConsumer.sourceContentFor(sourceFile); - if (content != null) { - generator.setSourceContent(sourceFile, content); - } - }); - return generator; - }; - -/** - * Add a single mapping from original source line and column to the generated - * source's line and column for this source map being created. The mapping - * object should have the following properties: - * - * - generated: An object with the generated line and column positions. - * - original: An object with the original line and column positions. - * - source: The original source file (relative to the sourceRoot). - * - name: An optional original token name for this mapping. - */ -SourceMapGenerator.prototype.addMapping = - function SourceMapGenerator_addMapping(aArgs) { - var generated = util.getArg(aArgs, 'generated'); - var original = util.getArg(aArgs, 'original', null); - var source = util.getArg(aArgs, 'source', null); - var name = util.getArg(aArgs, 'name', null); - - if (!this._skipValidation) { - if (this._validateMapping(generated, original, source, name) === false) { - return; - } - } - - if (source != null) { - source = String(source); - if (!this._sources.has(source)) { - this._sources.add(source); - } - } - - if (name != null) { - name = String(name); - if (!this._names.has(name)) { - this._names.add(name); - } - } - - this._mappings.add({ - generatedLine: generated.line, - generatedColumn: generated.column, - originalLine: original != null && original.line, - originalColumn: original != null && original.column, - source: source, - name: name - }); - }; - -/** - * Set the source content for a source file. 
- */ -SourceMapGenerator.prototype.setSourceContent = - function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { - var source = aSourceFile; - if (this._sourceRoot != null) { - source = util.relative(this._sourceRoot, source); - } - - if (aSourceContent != null) { - // Add the source content to the _sourcesContents map. - // Create a new _sourcesContents map if the property is null. - if (!this._sourcesContents) { - this._sourcesContents = Object.create(null); - } - this._sourcesContents[util.toSetString(source)] = aSourceContent; - } else if (this._sourcesContents) { - // Remove the source file from the _sourcesContents map. - // If the _sourcesContents map is empty, set the property to null. - delete this._sourcesContents[util.toSetString(source)]; - if (Object.keys(this._sourcesContents).length === 0) { - this._sourcesContents = null; - } - } - }; - -/** - * Applies the mappings of a sub-source-map for a specific source file to the - * source map being generated. Each mapping to the supplied source file is - * rewritten using the supplied source map. Note: The resolution for the - * resulting mappings is the minimium of this map and the supplied map. - * - * @param aSourceMapConsumer The source map to be applied. - * @param aSourceFile Optional. The filename of the source file. - * If omitted, SourceMapConsumer's file property will be used. - * @param aSourceMapPath Optional. The dirname of the path to the source map - * to be applied. If relative, it is relative to the SourceMapConsumer. - * This parameter is needed when the two source maps aren't in the same - * directory, and the source map to be applied contains relative source - * paths. If so, those relative source paths need to be rewritten - * relative to the SourceMapGenerator. - */ -SourceMapGenerator.prototype.applySourceMap = - function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { - var sourceFile = aSourceFile; - // If aSourceFile is omitted, we will use the file property of the SourceMap - if (aSourceFile == null) { - if (aSourceMapConsumer.file == null) { - throw new Error( - 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + - 'or the source map\'s "file" property. Both were omitted.' - ); - } - sourceFile = aSourceMapConsumer.file; - } - var sourceRoot = this._sourceRoot; - // Make "sourceFile" relative if an absolute Url is passed. - if (sourceRoot != null) { - sourceFile = util.relative(sourceRoot, sourceFile); - } - // Applying the SourceMap can add and remove items from the sources and - // the names array. - var newSources = new ArraySet(); - var newNames = new ArraySet(); - - // Find mappings for the "sourceFile" - this._mappings.unsortedForEach(function (mapping) { - if (mapping.source === sourceFile && mapping.originalLine != null) { - // Check if it can be mapped by the source map, then update the mapping. 
- var original = aSourceMapConsumer.originalPositionFor({ - line: mapping.originalLine, - column: mapping.originalColumn - }); - if (original.source != null) { - // Copy mapping - mapping.source = original.source; - if (aSourceMapPath != null) { - mapping.source = util.join(aSourceMapPath, mapping.source) - } - if (sourceRoot != null) { - mapping.source = util.relative(sourceRoot, mapping.source); - } - mapping.originalLine = original.line; - mapping.originalColumn = original.column; - if (original.name != null) { - mapping.name = original.name; - } - } - } - - var source = mapping.source; - if (source != null && !newSources.has(source)) { - newSources.add(source); - } - - var name = mapping.name; - if (name != null && !newNames.has(name)) { - newNames.add(name); - } - - }, this); - this._sources = newSources; - this._names = newNames; - - // Copy sourcesContents of applied map. - aSourceMapConsumer.sources.forEach(function (sourceFile) { - var content = aSourceMapConsumer.sourceContentFor(sourceFile); - if (content != null) { - if (aSourceMapPath != null) { - sourceFile = util.join(aSourceMapPath, sourceFile); - } - if (sourceRoot != null) { - sourceFile = util.relative(sourceRoot, sourceFile); - } - this.setSourceContent(sourceFile, content); - } - }, this); - }; - -/** - * A mapping can have one of the three levels of data: - * - * 1. Just the generated position. - * 2. The Generated position, original position, and original source. - * 3. Generated and original position, original source, as well as a name - * token. - * - * To maintain consistency, we validate that any new mapping being added falls - * in to one of these categories. - */ -SourceMapGenerator.prototype._validateMapping = - function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, - aName) { - // When aOriginal is truthy but has empty values for .line and .column, - // it is most likely a programmer error. In this case we throw a very - // specific error message to try to guide them the right way. - // For example: https://github.com/Polymer/polymer-bundler/pull/519 - if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { - var message = 'original.line and original.column are not numbers -- you probably meant to omit ' + - 'the original mapping entirely and only map the generated position. If so, pass ' + - 'null for the original mapping instead of an object with empty or null values.' - - if (this._ignoreInvalidMapping) { - if (typeof console !== 'undefined' && console.warn) { - console.warn(message); - } - return false; - } else { - throw new Error(message); - } - } - - if (aGenerated && 'line' in aGenerated && 'column' in aGenerated - && aGenerated.line > 0 && aGenerated.column >= 0 - && !aOriginal && !aSource && !aName) { - // Case 1. - return; - } - else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated - && aOriginal && 'line' in aOriginal && 'column' in aOriginal - && aGenerated.line > 0 && aGenerated.column >= 0 - && aOriginal.line > 0 && aOriginal.column >= 0 - && aSource) { - // Cases 2 and 3. 
- return; - } - else { - var message = 'Invalid mapping: ' + JSON.stringify({ - generated: aGenerated, - source: aSource, - original: aOriginal, - name: aName - }); - - if (this._ignoreInvalidMapping) { - if (typeof console !== 'undefined' && console.warn) { - console.warn(message); - } - return false; - } else { - throw new Error(message) - } - } - }; - -/** - * Serialize the accumulated mappings in to the stream of base 64 VLQs - * specified by the source map format. - */ -SourceMapGenerator.prototype._serializeMappings = - function SourceMapGenerator_serializeMappings() { - var previousGeneratedColumn = 0; - var previousGeneratedLine = 1; - var previousOriginalColumn = 0; - var previousOriginalLine = 0; - var previousName = 0; - var previousSource = 0; - var result = ''; - var next; - var mapping; - var nameIdx; - var sourceIdx; - - var mappings = this._mappings.toArray(); - for (var i = 0, len = mappings.length; i < len; i++) { - mapping = mappings[i]; - next = '' - - if (mapping.generatedLine !== previousGeneratedLine) { - previousGeneratedColumn = 0; - while (mapping.generatedLine !== previousGeneratedLine) { - next += ';'; - previousGeneratedLine++; - } - } - else { - if (i > 0) { - if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { - continue; - } - next += ','; - } - } - - next += base64VLQ.encode(mapping.generatedColumn - - previousGeneratedColumn); - previousGeneratedColumn = mapping.generatedColumn; - - if (mapping.source != null) { - sourceIdx = this._sources.indexOf(mapping.source); - next += base64VLQ.encode(sourceIdx - previousSource); - previousSource = sourceIdx; - - // lines are stored 0-based in SourceMap spec version 3 - next += base64VLQ.encode(mapping.originalLine - 1 - - previousOriginalLine); - previousOriginalLine = mapping.originalLine - 1; - - next += base64VLQ.encode(mapping.originalColumn - - previousOriginalColumn); - previousOriginalColumn = mapping.originalColumn; - - if (mapping.name != null) { - nameIdx = this._names.indexOf(mapping.name); - next += base64VLQ.encode(nameIdx - previousName); - previousName = nameIdx; - } - } - - result += next; - } - - return result; - }; - -SourceMapGenerator.prototype._generateSourcesContent = - function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { - return aSources.map(function (source) { - if (!this._sourcesContents) { - return null; - } - if (aSourceRoot != null) { - source = util.relative(aSourceRoot, source); - } - var key = util.toSetString(source); - return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) - ? this._sourcesContents[key] - : null; - }, this); - }; - -/** - * Externalize the source map. - */ -SourceMapGenerator.prototype.toJSON = - function SourceMapGenerator_toJSON() { - var map = { - version: this._version, - sources: this._sources.toArray(), - names: this._names.toArray(), - mappings: this._serializeMappings() - }; - if (this._file != null) { - map.file = this._file; - } - if (this._sourceRoot != null) { - map.sourceRoot = this._sourceRoot; - } - if (this._sourcesContents) { - map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); - } - - return map; - }; - -/** - * Render the source map being generated to a string. 
- */ -SourceMapGenerator.prototype.toString = - function SourceMapGenerator_toString() { - return JSON.stringify(this.toJSON()); - }; - -exports.SourceMapGenerator = SourceMapGenerator; diff --git a/node_modules/source-map-js/lib/source-node.d.ts b/node_modules/source-map-js/lib/source-node.d.ts deleted file mode 100644 index 4df6a1a..0000000 --- a/node_modules/source-map-js/lib/source-node.d.ts +++ /dev/null @@ -1 +0,0 @@ -export { SourceNode } from '..'; diff --git a/node_modules/source-map-js/lib/source-node.js b/node_modules/source-map-js/lib/source-node.js deleted file mode 100644 index 8bcdbe3..0000000 --- a/node_modules/source-map-js/lib/source-node.js +++ /dev/null @@ -1,413 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; -var util = require('./util'); - -// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other -// operating systems these days (capturing the result). -var REGEX_NEWLINE = /(\r?\n)/; - -// Newline character code for charCodeAt() comparisons -var NEWLINE_CODE = 10; - -// Private symbol for identifying `SourceNode`s when multiple versions of -// the source-map library are loaded. This MUST NOT CHANGE across -// versions! -var isSourceNode = "$$$isSourceNode$$$"; - -/** - * SourceNodes provide a way to abstract over interpolating/concatenating - * snippets of generated JavaScript source code while maintaining the line and - * column information associated with the original source code. - * - * @param aLine The original line number. - * @param aColumn The original column number. - * @param aSource The original source's filename. - * @param aChunks Optional. An array of strings which are snippets of - * generated JS, or other SourceNodes. - * @param aName The original identifier. - */ -function SourceNode(aLine, aColumn, aSource, aChunks, aName) { - this.children = []; - this.sourceContents = {}; - this.line = aLine == null ? null : aLine; - this.column = aColumn == null ? null : aColumn; - this.source = aSource == null ? null : aSource; - this.name = aName == null ? null : aName; - this[isSourceNode] = true; - if (aChunks != null) this.add(aChunks); -} - -/** - * Creates a SourceNode from generated code and a SourceMapConsumer. - * - * @param aGeneratedCode The generated code - * @param aSourceMapConsumer The SourceMap for the generated code - * @param aRelativePath Optional. The path that relative sources in the - * SourceMapConsumer should be relative to. - */ -SourceNode.fromStringWithSourceMap = - function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { - // The SourceNode we want to fill with the generated code - // and the SourceMap - var node = new SourceNode(); - - // All even indices of this array are one line of the generated code, - // while all odd indices are the newlines between two adjacent lines - // (since `REGEX_NEWLINE` captures its match). - // Processed fragments are accessed by calling `shiftNextLine`. - var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); - var remainingLinesIndex = 0; - var shiftNextLine = function() { - var lineContents = getNextLine(); - // The last line of a file might not have a newline. 
- var newLine = getNextLine() || ""; - return lineContents + newLine; - - function getNextLine() { - return remainingLinesIndex < remainingLines.length ? - remainingLines[remainingLinesIndex++] : undefined; - } - }; - - // We need to remember the position of "remainingLines" - var lastGeneratedLine = 1, lastGeneratedColumn = 0; - - // The generate SourceNodes we need a code range. - // To extract it current and last mapping is used. - // Here we store the last mapping. - var lastMapping = null; - - aSourceMapConsumer.eachMapping(function (mapping) { - if (lastMapping !== null) { - // We add the code from "lastMapping" to "mapping": - // First check if there is a new line in between. - if (lastGeneratedLine < mapping.generatedLine) { - // Associate first line with "lastMapping" - addMappingWithCode(lastMapping, shiftNextLine()); - lastGeneratedLine++; - lastGeneratedColumn = 0; - // The remaining code is added without mapping - } else { - // There is no new line in between. - // Associate the code between "lastGeneratedColumn" and - // "mapping.generatedColumn" with "lastMapping" - var nextLine = remainingLines[remainingLinesIndex] || ''; - var code = nextLine.substr(0, mapping.generatedColumn - - lastGeneratedColumn); - remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - - lastGeneratedColumn); - lastGeneratedColumn = mapping.generatedColumn; - addMappingWithCode(lastMapping, code); - // No more remaining code, continue - lastMapping = mapping; - return; - } - } - // We add the generated code until the first mapping - // to the SourceNode without any mapping. - // Each line is added as separate string. - while (lastGeneratedLine < mapping.generatedLine) { - node.add(shiftNextLine()); - lastGeneratedLine++; - } - if (lastGeneratedColumn < mapping.generatedColumn) { - var nextLine = remainingLines[remainingLinesIndex] || ''; - node.add(nextLine.substr(0, mapping.generatedColumn)); - remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); - lastGeneratedColumn = mapping.generatedColumn; - } - lastMapping = mapping; - }, this); - // We have processed all mappings. - if (remainingLinesIndex < remainingLines.length) { - if (lastMapping) { - // Associate the remaining code in the current line with "lastMapping" - addMappingWithCode(lastMapping, shiftNextLine()); - } - // and add the remaining lines without any mapping - node.add(remainingLines.splice(remainingLinesIndex).join("")); - } - - // Copy sourcesContent into SourceNode - aSourceMapConsumer.sources.forEach(function (sourceFile) { - var content = aSourceMapConsumer.sourceContentFor(sourceFile); - if (content != null) { - if (aRelativePath != null) { - sourceFile = util.join(aRelativePath, sourceFile); - } - node.setSourceContent(sourceFile, content); - } - }); - - return node; - - function addMappingWithCode(mapping, code) { - if (mapping === null || mapping.source === undefined) { - node.add(code); - } else { - var source = aRelativePath - ? util.join(aRelativePath, mapping.source) - : mapping.source; - node.add(new SourceNode(mapping.originalLine, - mapping.originalColumn, - source, - code, - mapping.name)); - } - } - }; - -/** - * Add a chunk of generated JS to this source node. - * - * @param aChunk A string snippet of generated JS code, another instance of - * SourceNode, or an array where each member is one of those things. 
- */ -SourceNode.prototype.add = function SourceNode_add(aChunk) { - if (Array.isArray(aChunk)) { - aChunk.forEach(function (chunk) { - this.add(chunk); - }, this); - } - else if (aChunk[isSourceNode] || typeof aChunk === "string") { - if (aChunk) { - this.children.push(aChunk); - } - } - else { - throw new TypeError( - "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk - ); - } - return this; -}; - -/** - * Add a chunk of generated JS to the beginning of this source node. - * - * @param aChunk A string snippet of generated JS code, another instance of - * SourceNode, or an array where each member is one of those things. - */ -SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { - if (Array.isArray(aChunk)) { - for (var i = aChunk.length-1; i >= 0; i--) { - this.prepend(aChunk[i]); - } - } - else if (aChunk[isSourceNode] || typeof aChunk === "string") { - this.children.unshift(aChunk); - } - else { - throw new TypeError( - "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk - ); - } - return this; -}; - -/** - * Walk over the tree of JS snippets in this node and its children. The - * walking function is called once for each snippet of JS and is passed that - * snippet and the its original associated source's line/column location. - * - * @param aFn The traversal function. - */ -SourceNode.prototype.walk = function SourceNode_walk(aFn) { - var chunk; - for (var i = 0, len = this.children.length; i < len; i++) { - chunk = this.children[i]; - if (chunk[isSourceNode]) { - chunk.walk(aFn); - } - else { - if (chunk !== '') { - aFn(chunk, { source: this.source, - line: this.line, - column: this.column, - name: this.name }); - } - } - } -}; - -/** - * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between - * each of `this.children`. - * - * @param aSep The separator. - */ -SourceNode.prototype.join = function SourceNode_join(aSep) { - var newChildren; - var i; - var len = this.children.length; - if (len > 0) { - newChildren = []; - for (i = 0; i < len-1; i++) { - newChildren.push(this.children[i]); - newChildren.push(aSep); - } - newChildren.push(this.children[i]); - this.children = newChildren; - } - return this; -}; - -/** - * Call String.prototype.replace on the very right-most source snippet. Useful - * for trimming whitespace from the end of a source node, etc. - * - * @param aPattern The pattern to replace. - * @param aReplacement The thing to replace the pattern with. - */ -SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { - var lastChild = this.children[this.children.length - 1]; - if (lastChild[isSourceNode]) { - lastChild.replaceRight(aPattern, aReplacement); - } - else if (typeof lastChild === 'string') { - this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); - } - else { - this.children.push(''.replace(aPattern, aReplacement)); - } - return this; -}; - -/** - * Set the source content for a source file. This will be added to the SourceMapGenerator - * in the sourcesContent field. - * - * @param aSourceFile The filename of the source file - * @param aSourceContent The content of the source file - */ -SourceNode.prototype.setSourceContent = - function SourceNode_setSourceContent(aSourceFile, aSourceContent) { - this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; - }; - -/** - * Walk over the tree of SourceNodes. 
The walking function is called for each - * source file content and is passed the filename and source content. - * - * @param aFn The traversal function. - */ -SourceNode.prototype.walkSourceContents = - function SourceNode_walkSourceContents(aFn) { - for (var i = 0, len = this.children.length; i < len; i++) { - if (this.children[i][isSourceNode]) { - this.children[i].walkSourceContents(aFn); - } - } - - var sources = Object.keys(this.sourceContents); - for (var i = 0, len = sources.length; i < len; i++) { - aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); - } - }; - -/** - * Return the string representation of this source node. Walks over the tree - * and concatenates all the various snippets together to one string. - */ -SourceNode.prototype.toString = function SourceNode_toString() { - var str = ""; - this.walk(function (chunk) { - str += chunk; - }); - return str; -}; - -/** - * Returns the string representation of this source node along with a source - * map. - */ -SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { - var generated = { - code: "", - line: 1, - column: 0 - }; - var map = new SourceMapGenerator(aArgs); - var sourceMappingActive = false; - var lastOriginalSource = null; - var lastOriginalLine = null; - var lastOriginalColumn = null; - var lastOriginalName = null; - this.walk(function (chunk, original) { - generated.code += chunk; - if (original.source !== null - && original.line !== null - && original.column !== null) { - if(lastOriginalSource !== original.source - || lastOriginalLine !== original.line - || lastOriginalColumn !== original.column - || lastOriginalName !== original.name) { - map.addMapping({ - source: original.source, - original: { - line: original.line, - column: original.column - }, - generated: { - line: generated.line, - column: generated.column - }, - name: original.name - }); - } - lastOriginalSource = original.source; - lastOriginalLine = original.line; - lastOriginalColumn = original.column; - lastOriginalName = original.name; - sourceMappingActive = true; - } else if (sourceMappingActive) { - map.addMapping({ - generated: { - line: generated.line, - column: generated.column - } - }); - lastOriginalSource = null; - sourceMappingActive = false; - } - for (var idx = 0, length = chunk.length; idx < length; idx++) { - if (chunk.charCodeAt(idx) === NEWLINE_CODE) { - generated.line++; - generated.column = 0; - // Mappings end at eol - if (idx + 1 === length) { - lastOriginalSource = null; - sourceMappingActive = false; - } else if (sourceMappingActive) { - map.addMapping({ - source: original.source, - original: { - line: original.line, - column: original.column - }, - generated: { - line: generated.line, - column: generated.column - }, - name: original.name - }); - } - } else { - generated.column++; - } - } - }); - this.walkSourceContents(function (sourceFile, sourceContent) { - map.setSourceContent(sourceFile, sourceContent); - }); - - return { code: generated.code, map: map }; -}; - -exports.SourceNode = SourceNode; diff --git a/node_modules/source-map-js/lib/util.js b/node_modules/source-map-js/lib/util.js deleted file mode 100644 index 430e2d0..0000000 --- a/node_modules/source-map-js/lib/util.js +++ /dev/null @@ -1,594 +0,0 @@ -/* -*- Mode: js; js-indent-level: 2; -*- */ -/* - * Copyright 2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. 
See LICENSE or: - * http://opensource.org/licenses/BSD-3-Clause - */ - -/** - * This is a helper function for getting values from parameter/options - * objects. - * - * @param args The object we are extracting values from - * @param name The name of the property we are getting. - * @param defaultValue An optional value to return if the property is missing - * from the object. If this is not specified and the property is missing, an - * error will be thrown. - */ -function getArg(aArgs, aName, aDefaultValue) { - if (aName in aArgs) { - return aArgs[aName]; - } else if (arguments.length === 3) { - return aDefaultValue; - } else { - throw new Error('"' + aName + '" is a required argument.'); - } -} -exports.getArg = getArg; - -var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; -var dataUrlRegexp = /^data:.+\,.+$/; - -function urlParse(aUrl) { - var match = aUrl.match(urlRegexp); - if (!match) { - return null; - } - return { - scheme: match[1], - auth: match[2], - host: match[3], - port: match[4], - path: match[5] - }; -} -exports.urlParse = urlParse; - -function urlGenerate(aParsedUrl) { - var url = ''; - if (aParsedUrl.scheme) { - url += aParsedUrl.scheme + ':'; - } - url += '//'; - if (aParsedUrl.auth) { - url += aParsedUrl.auth + '@'; - } - if (aParsedUrl.host) { - url += aParsedUrl.host; - } - if (aParsedUrl.port) { - url += ":" + aParsedUrl.port - } - if (aParsedUrl.path) { - url += aParsedUrl.path; - } - return url; -} -exports.urlGenerate = urlGenerate; - -var MAX_CACHED_INPUTS = 32; - -/** - * Takes some function `f(input) -> result` and returns a memoized version of - * `f`. - * - * We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The - * memoization is a dumb-simple, linear least-recently-used cache. - */ -function lruMemoize(f) { - var cache = []; - - return function(input) { - for (var i = 0; i < cache.length; i++) { - if (cache[i].input === input) { - var temp = cache[0]; - cache[0] = cache[i]; - cache[i] = temp; - return cache[0].result; - } - } - - var result = f(input); - - cache.unshift({ - input, - result, - }); - - if (cache.length > MAX_CACHED_INPUTS) { - cache.pop(); - } - - return result; - }; -} - -/** - * Normalizes a path, or the path portion of a URL: - * - * - Replaces consecutive slashes with one slash. - * - Removes unnecessary '.' parts. - * - Removes unnecessary '/..' parts. - * - * Based on code in the Node.js 'path' core module. - * - * @param aPath The path or url to normalize. - */ -var normalize = lruMemoize(function normalize(aPath) { - var path = aPath; - var url = urlParse(aPath); - if (url) { - if (!url.path) { - return aPath; - } - path = url.path; - } - var isAbsolute = exports.isAbsolute(path); - // Split the path into parts between `/` characters. This is much faster than - // using `.split(/\/+/g)`. - var parts = []; - var start = 0; - var i = 0; - while (true) { - start = i; - i = path.indexOf("/", start); - if (i === -1) { - parts.push(path.slice(start)); - break; - } else { - parts.push(path.slice(start, i)); - while (i < path.length && path[i] === "/") { - i++; - } - } - } - - for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { - part = parts[i]; - if (part === '.') { - parts.splice(i, 1); - } else if (part === '..') { - up++; - } else if (up > 0) { - if (part === '') { - // The first part is blank if the path is absolute. Trying to go - // above the root is a no-op. Therefore we can remove all '..' parts - // directly after the root. 
- parts.splice(i + 1, up); - up = 0; - } else { - parts.splice(i, 2); - up--; - } - } - } - path = parts.join('/'); - - if (path === '') { - path = isAbsolute ? '/' : '.'; - } - - if (url) { - url.path = path; - return urlGenerate(url); - } - return path; -}); -exports.normalize = normalize; - -/** - * Joins two paths/URLs. - * - * @param aRoot The root path or URL. - * @param aPath The path or URL to be joined with the root. - * - * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a - * scheme-relative URL: Then the scheme of aRoot, if any, is prepended - * first. - * - Otherwise aPath is a path. If aRoot is a URL, then its path portion - * is updated with the result and aRoot is returned. Otherwise the result - * is returned. - * - If aPath is absolute, the result is aPath. - * - Otherwise the two paths are joined with a slash. - * - Joining for example 'http://' and 'www.example.com' is also supported. - */ -function join(aRoot, aPath) { - if (aRoot === "") { - aRoot = "."; - } - if (aPath === "") { - aPath = "."; - } - var aPathUrl = urlParse(aPath); - var aRootUrl = urlParse(aRoot); - if (aRootUrl) { - aRoot = aRootUrl.path || '/'; - } - - // `join(foo, '//www.example.org')` - if (aPathUrl && !aPathUrl.scheme) { - if (aRootUrl) { - aPathUrl.scheme = aRootUrl.scheme; - } - return urlGenerate(aPathUrl); - } - - if (aPathUrl || aPath.match(dataUrlRegexp)) { - return aPath; - } - - // `join('http://', 'www.example.com')` - if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { - aRootUrl.host = aPath; - return urlGenerate(aRootUrl); - } - - var joined = aPath.charAt(0) === '/' - ? aPath - : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); - - if (aRootUrl) { - aRootUrl.path = joined; - return urlGenerate(aRootUrl); - } - return joined; -} -exports.join = join; - -exports.isAbsolute = function (aPath) { - return aPath.charAt(0) === '/' || urlRegexp.test(aPath); -}; - -/** - * Make a path relative to a URL or another path. - * - * @param aRoot The root path or URL. - * @param aPath The path or URL to be made relative to aRoot. - */ -function relative(aRoot, aPath) { - if (aRoot === "") { - aRoot = "."; - } - - aRoot = aRoot.replace(/\/$/, ''); - - // It is possible for the path to be above the root. In this case, simply - // checking whether the root is a prefix of the path won't work. Instead, we - // need to remove components from the root one by one, until either we find - // a prefix that fits, or we run out of components to remove. - var level = 0; - while (aPath.indexOf(aRoot + '/') !== 0) { - var index = aRoot.lastIndexOf("/"); - if (index < 0) { - return aPath; - } - - // If the only part of the root that is left is the scheme (i.e. http://, - // file:///, etc.), one or more slashes (/), or simply nothing at all, we - // have exhausted all components, so the path is not relative to the root. - aRoot = aRoot.slice(0, index); - if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { - return aPath; - } - - ++level; - } - - // Make sure we add a "../" for each component we removed from the root. - return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); -} -exports.relative = relative; - -var supportsNullProto = (function () { - var obj = Object.create(null); - return !('__proto__' in obj); -}()); - -function identity (s) { - return s; -} - -/** - * Because behavior goes wacky when you set `__proto__` on objects, we - * have to prefix all the strings in our set with an arbitrary character. 
- * - * See https://github.com/mozilla/source-map/pull/31 and - * https://github.com/mozilla/source-map/issues/30 - * - * @param String aStr - */ -function toSetString(aStr) { - if (isProtoString(aStr)) { - return '$' + aStr; - } - - return aStr; -} -exports.toSetString = supportsNullProto ? identity : toSetString; - -function fromSetString(aStr) { - if (isProtoString(aStr)) { - return aStr.slice(1); - } - - return aStr; -} -exports.fromSetString = supportsNullProto ? identity : fromSetString; - -function isProtoString(s) { - if (!s) { - return false; - } - - var length = s.length; - - if (length < 9 /* "__proto__".length */) { - return false; - } - - if (s.charCodeAt(length - 1) !== 95 /* '_' */ || - s.charCodeAt(length - 2) !== 95 /* '_' */ || - s.charCodeAt(length - 3) !== 111 /* 'o' */ || - s.charCodeAt(length - 4) !== 116 /* 't' */ || - s.charCodeAt(length - 5) !== 111 /* 'o' */ || - s.charCodeAt(length - 6) !== 114 /* 'r' */ || - s.charCodeAt(length - 7) !== 112 /* 'p' */ || - s.charCodeAt(length - 8) !== 95 /* '_' */ || - s.charCodeAt(length - 9) !== 95 /* '_' */) { - return false; - } - - for (var i = length - 10; i >= 0; i--) { - if (s.charCodeAt(i) !== 36 /* '$' */) { - return false; - } - } - - return true; -} - -/** - * Comparator between two mappings where the original positions are compared. - * - * Optionally pass in `true` as `onlyCompareGenerated` to consider two - * mappings with the same original source/line/column, but different generated - * line and column the same. Useful when searching for a mapping with a - * stubbed out mapping. - */ -function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { - var cmp = strcmp(mappingA.source, mappingB.source); - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalLine - mappingB.originalLine; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalColumn - mappingB.originalColumn; - if (cmp !== 0 || onlyCompareOriginal) { - return cmp; - } - - cmp = mappingA.generatedColumn - mappingB.generatedColumn; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.generatedLine - mappingB.generatedLine; - if (cmp !== 0) { - return cmp; - } - - return strcmp(mappingA.name, mappingB.name); -} -exports.compareByOriginalPositions = compareByOriginalPositions; - -function compareByOriginalPositionsNoSource(mappingA, mappingB, onlyCompareOriginal) { - var cmp - - cmp = mappingA.originalLine - mappingB.originalLine; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalColumn - mappingB.originalColumn; - if (cmp !== 0 || onlyCompareOriginal) { - return cmp; - } - - cmp = mappingA.generatedColumn - mappingB.generatedColumn; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.generatedLine - mappingB.generatedLine; - if (cmp !== 0) { - return cmp; - } - - return strcmp(mappingA.name, mappingB.name); -} -exports.compareByOriginalPositionsNoSource = compareByOriginalPositionsNoSource; - -/** - * Comparator between two mappings with deflated source and name indices where - * the generated positions are compared. - * - * Optionally pass in `true` as `onlyCompareGenerated` to consider two - * mappings with the same generated line and column, but different - * source/name/original line and column the same. Useful when searching for a - * mapping with a stubbed out mapping. 
- */ -function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { - var cmp = mappingA.generatedLine - mappingB.generatedLine; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.generatedColumn - mappingB.generatedColumn; - if (cmp !== 0 || onlyCompareGenerated) { - return cmp; - } - - cmp = strcmp(mappingA.source, mappingB.source); - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalLine - mappingB.originalLine; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalColumn - mappingB.originalColumn; - if (cmp !== 0) { - return cmp; - } - - return strcmp(mappingA.name, mappingB.name); -} -exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; - -function compareByGeneratedPositionsDeflatedNoLine(mappingA, mappingB, onlyCompareGenerated) { - var cmp = mappingA.generatedColumn - mappingB.generatedColumn; - if (cmp !== 0 || onlyCompareGenerated) { - return cmp; - } - - cmp = strcmp(mappingA.source, mappingB.source); - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalLine - mappingB.originalLine; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalColumn - mappingB.originalColumn; - if (cmp !== 0) { - return cmp; - } - - return strcmp(mappingA.name, mappingB.name); -} -exports.compareByGeneratedPositionsDeflatedNoLine = compareByGeneratedPositionsDeflatedNoLine; - -function strcmp(aStr1, aStr2) { - if (aStr1 === aStr2) { - return 0; - } - - if (aStr1 === null) { - return 1; // aStr2 !== null - } - - if (aStr2 === null) { - return -1; // aStr1 !== null - } - - if (aStr1 > aStr2) { - return 1; - } - - return -1; -} - -/** - * Comparator between two mappings with inflated source and name strings where - * the generated positions are compared. - */ -function compareByGeneratedPositionsInflated(mappingA, mappingB) { - var cmp = mappingA.generatedLine - mappingB.generatedLine; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.generatedColumn - mappingB.generatedColumn; - if (cmp !== 0) { - return cmp; - } - - cmp = strcmp(mappingA.source, mappingB.source); - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalLine - mappingB.originalLine; - if (cmp !== 0) { - return cmp; - } - - cmp = mappingA.originalColumn - mappingB.originalColumn; - if (cmp !== 0) { - return cmp; - } - - return strcmp(mappingA.name, mappingB.name); -} -exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; - -/** - * Strip any JSON XSSI avoidance prefix from the string (as documented - * in the source maps specification), and then parse the string as - * JSON. - */ -function parseSourceMapInput(str) { - return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, '')); -} -exports.parseSourceMapInput = parseSourceMapInput; - -/** - * Compute the URL of a source given the the source root, the source's - * URL, and the source map's URL. - */ -function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { - sourceURL = sourceURL || ''; - - if (sourceRoot) { - // This follows what Chrome does. - if (sourceRoot[sourceRoot.length - 1] !== '/' && sourceURL[0] !== '/') { - sourceRoot += '/'; - } - // The spec says: - // Line 4: An optional source root, useful for relocating source - // files on a server or removing repeated values in the - // “sources” entry. This value is prepended to the individual - // entries in the “source” field. - sourceURL = sourceRoot + sourceURL; - } - - // Historically, SourceMapConsumer did not take the sourceMapURL as - // a parameter. 
This mode is still somewhat supported, which is why - // this code block is conditional. However, it's preferable to pass - // the source map URL to SourceMapConsumer, so that this function - // can implement the source URL resolution algorithm as outlined in - // the spec. This block is basically the equivalent of: - // new URL(sourceURL, sourceMapURL).toString() - // ... except it avoids using URL, which wasn't available in the - // older releases of node still supported by this library. - // - // The spec says: - // If the sources are not absolute URLs after prepending of the - // “sourceRoot”, the sources are resolved relative to the - // SourceMap (like resolving script src in a html document). - if (sourceMapURL) { - var parsed = urlParse(sourceMapURL); - if (!parsed) { - throw new Error("sourceMapURL could not be parsed"); - } - if (parsed.path) { - // Strip the last path component, but keep the "/". - var index = parsed.path.lastIndexOf('/'); - if (index >= 0) { - parsed.path = parsed.path.substring(0, index + 1); - } - } - sourceURL = join(urlGenerate(parsed), sourceURL); - } - - return normalize(sourceURL); -} -exports.computeSourceURL = computeSourceURL; diff --git a/node_modules/source-map-js/package.json b/node_modules/source-map-js/package.json deleted file mode 100644 index f58dbeb..0000000 --- a/node_modules/source-map-js/package.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "name": "source-map-js", - "description": "Generates and consumes source maps", - "version": "1.2.1", - "homepage": "https://github.com/7rulnik/source-map-js", - "author": "Valentin 7rulnik Semirulnik ", - "contributors": [ - "Nick Fitzgerald ", - "Tobias Koppers ", - "Duncan Beevers ", - "Stephen Crane ", - "Ryan Seddon ", - "Miles Elam ", - "Mihai Bazon ", - "Michael Ficarra ", - "Todd Wolfson ", - "Alexander Solovyov ", - "Felix Gnass ", - "Conrad Irwin ", - "usrbincc ", - "David Glasser ", - "Chase Douglas ", - "Evan Wallace ", - "Heather Arthur ", - "Hugh Kennedy ", - "David Glasser ", - "Simon Lydell ", - "Jmeas Smith ", - "Michael Z Goddard ", - "azu ", - "John Gozde ", - "Adam Kirkton ", - "Chris Montgomery ", - "J. 
Ryan Stinnett ", - "Jack Herrington ", - "Chris Truter ", - "Daniel Espeset ", - "Jamie Wong ", - "Eddy Bruël ", - "Hawken Rives ", - "Gilad Peleg ", - "djchie ", - "Gary Ye ", - "Nicolas Lalevée " - ], - "repository": "7rulnik/source-map-js", - "main": "./source-map.js", - "files": [ - "source-map.js", - "source-map.d.ts", - "lib/" - ], - "engines": { - "node": ">=0.10.0" - }, - "license": "BSD-3-Clause", - "scripts": { - "test": "npm run build && node test/run-tests.js", - "build": "webpack --color", - "toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md" - }, - "devDependencies": { - "clean-publish": "^3.1.0", - "doctoc": "^0.15.0", - "webpack": "^1.12.0" - }, - "clean-publish": { - "cleanDocs": true - }, - "typings": "source-map.d.ts" -} diff --git a/node_modules/source-map-js/source-map.d.ts b/node_modules/source-map-js/source-map.d.ts deleted file mode 100644 index ec8892f..0000000 --- a/node_modules/source-map-js/source-map.d.ts +++ /dev/null @@ -1,104 +0,0 @@ -export interface StartOfSourceMap { - file?: string; - sourceRoot?: string; -} - -export interface RawSourceMap extends StartOfSourceMap { - version: string; - sources: string[]; - names: string[]; - sourcesContent?: string[]; - mappings: string; -} - -export interface Position { - line: number; - column: number; -} - -export interface LineRange extends Position { - lastColumn: number; -} - -export interface FindPosition extends Position { - // SourceMapConsumer.GREATEST_LOWER_BOUND or SourceMapConsumer.LEAST_UPPER_BOUND - bias?: number; -} - -export interface SourceFindPosition extends FindPosition { - source: string; -} - -export interface MappedPosition extends Position { - source: string; - name?: string; -} - -export interface MappingItem { - source: string | null; - generatedLine: number; - generatedColumn: number; - originalLine: number | null; - originalColumn: number | null; - name: string | null; -} - -export class SourceMapConsumer { - static GENERATED_ORDER: number; - static ORIGINAL_ORDER: number; - - static GREATEST_LOWER_BOUND: number; - static LEAST_UPPER_BOUND: number; - - constructor(rawSourceMap: RawSourceMap); - readonly file: string | undefined | null; - readonly sourceRoot: string | undefined | null; - readonly sourcesContent: readonly string[] | null | undefined; - readonly sources: readonly string[] - - computeColumnSpans(): void; - originalPositionFor(generatedPosition: FindPosition): MappedPosition; - generatedPositionFor(originalPosition: SourceFindPosition): LineRange; - allGeneratedPositionsFor(originalPosition: MappedPosition): Position[]; - hasContentsOfAllSources(): boolean; - sourceContentFor(source: string, returnNullOnMissing?: boolean): string | null; - eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void; -} - -export interface Mapping { - generated: Position; - original?: Position | null; - source?: string | null; - name?: string | null; -} - -export class SourceMapGenerator { - constructor(startOfSourceMap?: StartOfSourceMap); - static fromSourceMap(sourceMapConsumer: SourceMapConsumer, startOfSourceMap?: StartOfSourceMap): SourceMapGenerator; - addMapping(mapping: Mapping): void; - setSourceContent(sourceFile: string, sourceContent: string | null | undefined): void; - applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void; - toString(): string; - toJSON(): RawSourceMap; -} - -export interface CodeWithSourceMap { - code: string; - map: 
SourceMapGenerator; -} - -export class SourceNode { - constructor(); - constructor(line: number, column: number, source: string); - constructor(line: number, column: number, source: string, chunk?: string, name?: string); - static fromStringWithSourceMap(code: string, sourceMapConsumer: SourceMapConsumer, relativePath?: string): SourceNode; - add(chunk: string): void; - prepend(chunk: string): void; - setSourceContent(sourceFile: string, sourceContent: string): void; - walk(fn: (chunk: string, mapping: MappedPosition) => void): void; - walkSourceContents(fn: (file: string, content: string) => void): void; - join(sep: string): SourceNode; - replaceRight(pattern: string, replacement: string): SourceNode; - toString(): string; - toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap; -} diff --git a/node_modules/source-map-js/source-map.js b/node_modules/source-map-js/source-map.js deleted file mode 100644 index bc88fe8..0000000 --- a/node_modules/source-map-js/source-map.js +++ /dev/null @@ -1,8 +0,0 @@ -/* - * Copyright 2009-2011 Mozilla Foundation and contributors - * Licensed under the New BSD license. See LICENSE.txt or: - * http://opensource.org/licenses/BSD-3-Clause - */ -exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator; -exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer; -exports.SourceNode = require('./lib/source-node').SourceNode; diff --git a/node_modules/string-width/index.d.ts b/node_modules/string-width/index.d.ts deleted file mode 100644 index 12b5309..0000000 --- a/node_modules/string-width/index.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -declare const stringWidth: { - /** - Get the visual width of a string - the number of columns required to display it. - - Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. - - @example - ``` - import stringWidth = require('string-width'); - - stringWidth('a'); - //=> 1 - - stringWidth('古'); - //=> 2 - - stringWidth('\u001B[1m古\u001B[22m'); - //=> 2 - ``` - */ - (string: string): number; - - // TODO: remove this in the next major version, refactor the whole definition to: - // declare function stringWidth(string: string): number; - // export = stringWidth; - default: typeof stringWidth; -} - -export = stringWidth; diff --git a/node_modules/string-width/index.js b/node_modules/string-width/index.js deleted file mode 100644 index f4d261a..0000000 --- a/node_modules/string-width/index.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; -const stripAnsi = require('strip-ansi'); -const isFullwidthCodePoint = require('is-fullwidth-code-point'); -const emojiRegex = require('emoji-regex'); - -const stringWidth = string => { - if (typeof string !== 'string' || string.length === 0) { - return 0; - } - - string = stripAnsi(string); - - if (string.length === 0) { - return 0; - } - - string = string.replace(emojiRegex(), ' '); - - let width = 0; - - for (let i = 0; i < string.length; i++) { - const code = string.codePointAt(i); - - // Ignore control characters - if (code <= 0x1F || (code >= 0x7F && code <= 0x9F)) { - continue; - } - - // Ignore combining characters - if (code >= 0x300 && code <= 0x36F) { - continue; - } - - // Surrogates - if (code > 0xFFFF) { - i++; - } - - width += isFullwidthCodePoint(code) ? 
2 : 1; - } - - return width; -}; - -module.exports = stringWidth; -// TODO: remove this in the next major version -module.exports.default = stringWidth; diff --git a/node_modules/string-width/license b/node_modules/string-width/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/string-width/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/string-width/package.json b/node_modules/string-width/package.json deleted file mode 100644 index 28ba7b4..0000000 --- a/node_modules/string-width/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "string-width", - "version": "4.2.3", - "description": "Get the visual width of a string - the number of columns required to display it", - "license": "MIT", - "repository": "sindresorhus/string-width", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "string", - "character", - "unicode", - "width", - "visual", - "column", - "columns", - "fullwidth", - "full-width", - "full", - "ansi", - "escape", - "codes", - "cli", - "command-line", - "terminal", - "console", - "cjk", - "chinese", - "japanese", - "korean", - "fixed-width" - ], - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.1", - "xo": "^0.24.0" - } -} diff --git a/node_modules/string-width/readme.md b/node_modules/string-width/readme.md deleted file mode 100644 index bdd3141..0000000 --- a/node_modules/string-width/readme.md +++ /dev/null @@ -1,50 +0,0 @@ -# string-width - -> Get the visual width of a string - the number of columns required to display it - -Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. - -Useful to be able to measure the actual width of command-line output. 
- - -## Install - -``` -$ npm install string-width -``` - - -## Usage - -```js -const stringWidth = require('string-width'); - -stringWidth('a'); -//=> 1 - -stringWidth('古'); -//=> 2 - -stringWidth('\u001B[1m古\u001B[22m'); -//=> 2 -``` - - -## Related - -- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module -- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string -- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string - - ---- - -
- -Get professional support for this package with a Tidelift subscription
- -Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
-
    diff --git a/node_modules/strip-ansi/index.d.ts b/node_modules/strip-ansi/index.d.ts deleted file mode 100644 index 907fccc..0000000 --- a/node_modules/strip-ansi/index.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** -Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string. - -@example -``` -import stripAnsi = require('strip-ansi'); - -stripAnsi('\u001B[4mUnicorn\u001B[0m'); -//=> 'Unicorn' - -stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007'); -//=> 'Click' -``` -*/ -declare function stripAnsi(string: string): string; - -export = stripAnsi; diff --git a/node_modules/strip-ansi/index.js b/node_modules/strip-ansi/index.js deleted file mode 100644 index 9a593df..0000000 --- a/node_modules/strip-ansi/index.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict'; -const ansiRegex = require('ansi-regex'); - -module.exports = string => typeof string === 'string' ? string.replace(ansiRegex(), '') : string; diff --git a/node_modules/strip-ansi/license b/node_modules/strip-ansi/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/strip-ansi/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/strip-ansi/package.json b/node_modules/strip-ansi/package.json deleted file mode 100644 index 1a41108..0000000 --- a/node_modules/strip-ansi/package.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "name": "strip-ansi", - "version": "6.0.1", - "description": "Strip ANSI escape codes from a string", - "license": "MIT", - "repository": "chalk/strip-ansi", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "strip", - "trim", - "remove", - "ansi", - "styles", - "color", - "colour", - "colors", - "terminal", - "console", - "string", - "tty", - "escape", - "formatting", - "rgb", - "256", - "shell", - "xterm", - "log", - "logging", - "command-line", - "text" - ], - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "devDependencies": { - "ava": "^2.4.0", - "tsd": "^0.10.0", - "xo": "^0.25.3" - } -} diff --git a/node_modules/strip-ansi/readme.md b/node_modules/strip-ansi/readme.md deleted file mode 100644 index 7c4b56d..0000000 --- a/node_modules/strip-ansi/readme.md +++ /dev/null @@ -1,46 +0,0 @@ -# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi) - -> Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string - - -## Install - -``` -$ npm install strip-ansi -``` - - -## Usage - -```js -const stripAnsi = require('strip-ansi'); - -stripAnsi('\u001B[4mUnicorn\u001B[0m'); -//=> 'Unicorn' - -stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007'); -//=> 'Click' -``` - - -## strip-ansi for enterprise - -Available as part of the Tidelift Subscription. - -The maintainers of strip-ansi and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-strip-ansi?utm_source=npm-strip-ansi&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) - - -## Related - -- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module -- [strip-ansi-stream](https://github.com/chalk/strip-ansi-stream) - Streaming version of this module -- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes -- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes -- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right - - -## Maintainers - -- [Sindre Sorhus](https://github.com/sindresorhus) -- [Josh Junon](https://github.com/qix-) - diff --git a/node_modules/thenby/LICENSE.TXT b/node_modules/thenby/LICENSE.TXT deleted file mode 100644 index 8dada3e..0000000 --- a/node_modules/thenby/LICENSE.TXT +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/node_modules/thenby/README.md b/node_modules/thenby/README.md deleted file mode 100644 index 7b7aac0..0000000 --- a/node_modules/thenby/README.md +++ /dev/null @@ -1,159 +0,0 @@ - - - - -- [thenBy.js usage](#thenbyjs-usage) - - [Sort by property names](#sort-by-property-names) - - [Sort by unary functions](#sort-by-unary-functions) - - [Extra options](#extra-options) - - [Sort descending](#sort-descending) - - [Case insensitive sorting](#case-insensitive-sorting) - - [Custom compare function](#custom-compare-function) - - [Internationalization: Using javascripts native `Intl.Collator`](#internationalization-using-javascripts-native-intlcollator) - - [A word on performance](#a-word-on-performance) - - [Installing](#installing) - - [Install in your HTML](#install-in-your-html) - - [Install using npm or yarn](#install-using-npm-or-yarn) - - - -# thenBy.js usage - - [![NPM Version][npm-image]][npm-url] - [![NPM Downloads][downloads-image]][downloads-url] - -`thenBy` is a javascript micro library that helps sorting arrays on multiple keys. It allows you to use the [native Array::sort() method](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort) of javascript, but pass in multiple functions to sort that are composed with `firstBy().thenBy().thenBy()` style. - -Example: -```javascript -// first by length of name, then by population, then by ID -data.sort( - firstBy(function (v1, v2) { return v1.name.length - v2.name.length; }) - .thenBy(function (v1, v2) { return v1.population - v2.population; }) - .thenBy(function (v1, v2) { return v1.id - v2.id; }) -); -``` -`thenBy` also offers some nice shortcuts that make the most common ways of sorting even easier and more readable. - -### Sort by property names -Javascript sorting relies heavily on passing discriminator functions that return -1, 0 or 1 for a pair of items. While this is very flexible, often you want to sort on the value of a simple property. As a convenience, thenBy.js builds the appropriate compare function for you if you pass in a property name (instead of a function). The example above would then look like this: -```javascript -// first by length of name, then by population, then by ID -data.sort( - firstBy(function (v1, v2) { return v1.name.length - v2.name.length; }) - .thenBy("population") - .thenBy("id") -); -``` - -If an element doesn't have the property defined, it will sort like the empty string (""). Typically, this will be at the top. - -### Sort by unary functions -You can also pass a function that takes a single item and returns its sorting key. This turns the above expression into: -```javascript -// first by length of name, then by population, then by ID -data.sort( - firstBy(function (v) { return v.name.length; }) - .thenBy("population") - .thenBy("id") -); -``` - -Note that javascript contains a number of standard functions that can be passed in here as well. The Number() function will make your sorting sort on numeric values instead of lexical values: -```javascript -var values = ["2", "20", "03", "-2", "0", 200, "2"]; -var sorted = values.sort(firstBy(Number)); -``` -## Extra options -### Sort descending -thenBy.js allows you to pass in a second parameter for `direction`. If you pass in 'desc' or -1, the sorting will be reversed. 
So: -```javascript -// first by length of name descending, then by population descending, then by ID ascending -data.sort( - firstBy(function (v1, v2) { return v1.name.length - v2.name.length; }, -1) - .thenBy("population", "desc") - .thenBy("id") -); -``` - -### Case insensitive sorting -(as of v1.2.0) All of the shortcut methods allow you to sort case insensitive as well. The second parameter expects an options object (if it is a number, it is interpreted as `direction` as above). The ignoreCase property can be set to true, like this: -```javascript -// first by name, case insensitive, then by population -data.sort( - firstBy("name", {ignoreCase:true}) - .thenBy("population") -); -``` -If you want to use both descending and ignoreCase, you have to use the options syntax for direction as well: -```javascript -// sort by name, case insensitive and descending -data.sort(firstBy("name", {ignoreCase:true, direction:"desc"})); -``` -### Custom compare function -If you have more specific wishes for the exact sort order, but still want to use the convenience of unary functions or sorting on property names, you can pass in you own compare function in the options. Here we use a compare function that known about the relative values of playing cards:: - -```javascript -const cards = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K', 'A']; -var cardCompare = (c1, c2) =>{ - return cards.indexOf(c1) - cards.indexOf(c2); -} -var handOfCards = [ - { id: 7, suit:"c", card:"A" }, - { id: 8, suit:"d", card:"10" }, - // etc - ]; -handOfCards.sort(firstBy("card", {cmp: cardCompare, direction: "desc"})); - -``` -You can use the `cmp` function together with `direction`, but not with `ignoreCase` (for obvious reasons). - -### Internationalization: Using javascripts native `Intl.Collator` -One of the more interesting custom compare functions you may want to pass in is the native `compare` function that is exposed by `Intl.Collator`. This compare function knows about the different sorting rules in different cultures. Many browsers have these implemented, but in NodeJS, the API is implemented, but only for the English culture. You would use it with thenBy like this: - -```javascript -// in German, ä sorts with a -var germanCompare = new Intl.Collator('de').compare; -// in Swedish, ä sorts after z -var swedishCompare = new Intl.Collator('sv').compare; -data.sort( - firstBy("name", {cmp: swedishCompare}) -); -``` -Check the [details on using Intl.Collator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Collator). - -## A word on performance -thenBy constructs a comparer function for you. It does this by combining the functions you pass in with a number of small utility functions that perform tasks like "reverting", "combining the current sort order with the previous one", etc. Also, these operations try to work correctly, no matter what content is in the sorted array. There are two steps here that cost time: constructing the über-function and running it. The construction time should always be negligible. The run time however can be slower than when you carefully handcraft the compare function. Still, *normally you shouldn't worry about this*, but if you're sorting very large sets, it could matter. For example, there is some overhead in making several small functions call each other instead of creating one piece of code. 
Also, if you know your data well, and know that a specific field is *alwways present* and is *always a number*, you could code a significantly faster compare function then thenBy's results. The unit tests contain an extreme example. - -If you use thenBy to combine multiple compare functions into one (where each function expects two parameters), the difference is small. Using unary functions adds some overhead, using direction:desc adds some, using only a property name adds a little, but will check for missing values, which could be optimized. Ignoring case will slow down, but not more so than when handcoded. - -## Installing -### Install in your HTML -To include it into your page/project, just paste the minified code from https://raw.github.com/Teun/thenBy.js/master/thenBy.min.js into yours (699 characters). If you don't want the `firstBy` function in your global namespace, you can assign it to a local variable (see sample.htm). - -### Install using npm or yarn -```npm install thenby``` - -or - -```yarn add thenby``` - -then in your app: - -```var firstBy = require('thenby');``` - -or in TypeScript/ES6: - -```import {firstBy} from "thenby";``` - -For a small demo of how TypeScript support looks in a good editor (i.e. VS Code), [check this short video](https://youtu.be/mKJovFLyxro). - - -Thanks a lot to [bergus](https://github.com/bergus), [hagabaka](https://github.com/hagabaka), [infolyzer](https://github.com/infolyzer) and [Foxhoundn](https://github.com/Foxhoundn) for their improvements. -Thanks to [jsgoupil](https://github.com/jsgoupil) and [HonoluluHenk](https://github.com/HonoluluHenk) for their help on the TypeScript declaration. - - -[npm-image]: https://img.shields.io/npm/v/thenby.svg -[npm-url]: https://npmjs.org/package/thenby -[downloads-image]: https://img.shields.io/npm/dm/thenby.svg -[downloads-url]: https://npmjs.org/package/thenby diff --git a/node_modules/thenby/package.json b/node_modules/thenby/package.json deleted file mode 100644 index 4efb912..0000000 --- a/node_modules/thenby/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "thenby", - "version": "1.3.4", - "description": "Micro library for sorting arrays using the firstBy().thenBy().thenBy() syntax", - "main": "thenBy.module.js", - "types": "thenBy.module.d.ts", - "repository": { - "type": "git", - "url": "https://github.com/Teun/thenBy.js.git" - }, - "keywords": [ - "sort", - "order", - "sorting", - "arrays", - "multiple" - ], - "files": [ - "thenBy.module.js", - "thenBy.module.d.ts", - "thenBy.min.js" - ], - "scripts": { - "test": "gulp", - "build": "gulp build" - }, - "author": "Teun Duynstee", - "license": "Apache-2.0", - "bugs": { - "url": "https://github.com/Teun/thenBy.js/issues" - }, - "homepage": "https://github.com/Teun/thenBy.js", - "devDependencies": { - "chai": "^3.5.0", - "gulp": "^4.0.2", - "gulp-insert": "^0.5.0", - "gulp-mocha": "^7.0.2", - "gulp-rename": "^1.2.2", - "gulp-replace": "^0.5.4", - "gulp-uglify": "^2.1.2", - "gulp-umd": "^2.0.0", - "performance-now": "^0.2.0" - }, - "dependencies": {} -} diff --git a/node_modules/thenby/thenBy.min.js b/node_modules/thenby/thenBy.min.js deleted file mode 100644 index ba9ef0b..0000000 --- a/node_modules/thenby/thenBy.min.js +++ /dev/null @@ -1,2 +0,0 @@ -/*** Copyright 2020 Teun Duynstee Licensed under the Apache License, Version 2.0 ***/ -!function(n,t){"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?module.exports=t():n.firstBy=t()}(this,function(){return function(){function n(n){return n}function 
t(n){return"string"==typeof n?n.toLowerCase():n}function e(e,r){if(r="object"==typeof r?r:{direction:r},"function"!=typeof e){var i=e;e=function(n){return n[i]?n[i]:""}}if(1===e.length){var o=e,f=r.ignoreCase?t:n,u=r.cmp||function(n,t){return nt?1:0};e=function(n,t){return u(f(o(n)),f(o(t)))}}const c={"-1":"",desc:""};return r.direction in c?function(n,t){return-e(n,t)}:e}function r(n,t){var i="function"==typeof this&&!this.firstBy&&this,o=e(n,t),f=i?function(n,t){return i(n,t)||o(n,t)}:o;return f.thenBy=r,f}return r.firstBy=r,r}()}); \ No newline at end of file diff --git a/node_modules/thenby/thenBy.module.d.ts b/node_modules/thenby/thenBy.module.d.ts deleted file mode 100644 index becb6d5..0000000 --- a/node_modules/thenby/thenBy.module.d.ts +++ /dev/null @@ -1,51 +0,0 @@ -// Type definitions for thenBy -// Definitions by: Teun Duynstee (with significant help from @HonoluluHenk) -type SortOrder = "asc" | "desc" | -1 | 1; -declare class opt { - direction?:SortOrder; - ignoreCase?:boolean; -} -declare class typedOpt extends opt { - cmp?: (a:T, b:T)=> number; -} -interface IThenBy { - (v1: T, v2: T) : number; - /** - * Full format to compare two elements and determine which sorts first. - * @param compare function that receives two values from the sorted array and returns a number indicating which comes first: < 0: first comes first, 0: doesn't matter, > 0: second comes first. - * @param direction can be used to reverse the sorting by passing -1 - **/ - thenBy(compare: ((v1: T, v2: T) => number), direction?: SortOrder | opt): IThenBy; - /** - * Shorthand for selecting a value to sort on from the sorted element. - * @param select function that receives a value from the sorted array and selects the thing to sort on - * @param direction reverse by passing -1. opt for other options - **/ - thenBy(select: ((v: T) => U), direction?: SortOrder | typedOpt): IThenBy; - /** - * Shorthand for sorting on a simple property. - * @param byPropertyName is the name of the property to sort on as a string - * @param direction reverse by passing -1. opt for other options - **/ - thenBy(byPropertyName: (keyof T), direction?: SortOrder | typedOpt): IThenBy; -} -declare module "thenby" { - /** - * Full format to compare two elements and determine which sorts first. - * @param compare function that receives two values from the sorted array and returns a number indicating which comes first: < 0: first comes first, 0: doesn't matter, > 0: second comes first. - * @param direction can be used to reverse the sorting by passing -1 - **/ - export function firstBy(compare: ((v1: T, v2: T) => number), direction?: SortOrder | opt): IThenBy; - /** - * Shorthand for selecting a value to sort on from the sorted element. - * @param select function that receives a value from the sorted array and selects the thing to sort on - * @param direction reverse by passing -1. opt for other options - **/ - export function firstBy(select: ((v: T) => U), direction?: SortOrder | typedOpt): IThenBy; - /** - * Shorthand for sorting on a simple property. - * @param byPropertyName is the name of the property to sort on as a string - * @param direction reverse by passing -1. 
opt for other options - **/ - export function firstBy(byPropertyName: (keyof T), direction?: SortOrder | typedOpt): IThenBy; - } diff --git a/node_modules/thenby/thenBy.module.js b/node_modules/thenby/thenBy.module.js deleted file mode 100644 index bd1dccb..0000000 --- a/node_modules/thenby/thenBy.module.js +++ /dev/null @@ -1,60 +0,0 @@ -/*** - Copyright 2013 Teun Duynstee - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -module.exports = (function() { - - function identity(v){return v;} - - function ignoreCase(v){return typeof(v)==="string" ? v.toLowerCase() : v;} - - function makeCompareFunction(f, opt){ - opt = typeof(opt)==="object" ? opt : {direction:opt}; - - if(typeof(f)!="function"){ - var prop = f; - // make unary function - f = function(v1){return !!v1[prop] ? v1[prop] : "";} - } - if(f.length === 1) { - // f is a unary function mapping a single item to its sort score - var uf = f; - var preprocess = opt.ignoreCase?ignoreCase:identity; - var cmp = opt.cmp || function(v1,v2) {return v1 < v2 ? -1 : v1 > v2 ? 1 : 0;} - f = function(v1,v2) {return cmp(preprocess(uf(v1)), preprocess(uf(v2)));} - } - const descTokens = {"-1":'', desc:''}; - if(opt.direction in descTokens) return function(v1,v2){return -f(v1,v2)}; - return f; - } - - /* adds a secondary compare function to the target function (`this` context) - which is applied in case the first one returns 0 (equal) - returns a new compare function, which has a `thenBy` method as well */ - function tb(func, opt) { - /* should get value false for the first call. This can be done by calling the - exported function, or the firstBy property on it (for es6 module compatibility) - */ - var x = (typeof(this) == "function" && !this.firstBy) ? this : false; - var y = makeCompareFunction(func, opt); - var f = x ? function(a, b) { - return x(a,b) || y(a,b); - } - : y; - f.thenBy = tb; - return f; - } - tb.firstBy = tb; - return tb; -})(); diff --git a/node_modules/to-regex-range/LICENSE b/node_modules/to-regex-range/LICENSE deleted file mode 100644 index 7cccaf9..0000000 --- a/node_modules/to-regex-range/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/to-regex-range/README.md b/node_modules/to-regex-range/README.md deleted file mode 100644 index 38887da..0000000 --- a/node_modules/to-regex-range/README.md +++ /dev/null @@ -1,305 +0,0 @@ -# to-regex-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) - -> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save to-regex-range -``` - -
    -What does this do? - -
- -This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers. - -**Example** - -```js -const toRegexRange = require('to-regex-range'); -const regex = new RegExp(toRegexRange('15', '95')); -``` - -A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string). -
    - -
    - -
    -Why use this library? - -
    - -### Convenience - -Creating regular expressions for matching numbers gets deceptively complicated pretty fast. - -For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: - -* regex for matching `1` => `/1/` (easy enough) -* regex for matching `1` through `5` => `/[1-5]/` (not bad...) -* regex for matching `1` or `5` => `/(1|5)/` (still easy...) -* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) -* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) -* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) -* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) - -The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. - -**Learn more** - -If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. - -### Heavily tested - -As of April 07, 2019, this library runs [>1m test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are correct. - -Tests run in ~280ms on my MacBook Pro, 2.5 GHz Intel Core i7. - -### Optimized - -Generated regular expressions are optimized: - -* duplicate sequences and character classes are reduced using quantifiers -* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative -* uses fragment caching to avoid processing the same exact string more than once - -
    - -
    - -## Usage - -Add this library to your javascript application with the following line of code - -```js -const toRegexRange = require('to-regex-range'); -``` - -The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers). - -```js -const source = toRegexRange('15', '95'); -//=> 1[5-9]|[2-8][0-9]|9[0-5] - -const regex = new RegExp(`^${source}$`); -console.log(regex.test('14')); //=> false -console.log(regex.test('50')); //=> true -console.log(regex.test('94')); //=> true -console.log(regex.test('96')); //=> false -``` - -## Options - -### options.capture - -**Type**: `boolean` - -**Deafault**: `undefined` - -Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges. - -```js -console.log(toRegexRange('-10', '10')); -//=> -[1-9]|-?10|[0-9] - -console.log(toRegexRange('-10', '10', { capture: true })); -//=> (-[1-9]|-?10|[0-9]) -``` - -### options.shorthand - -**Type**: `boolean` - -**Deafault**: `undefined` - -Use the regex shorthand for `[0-9]`: - -```js -console.log(toRegexRange('0', '999999')); -//=> [0-9]|[1-9][0-9]{1,5} - -console.log(toRegexRange('0', '999999', { shorthand: true })); -//=> \d|[1-9]\d{1,5} -``` - -### options.relaxZeros - -**Type**: `boolean` - -**Default**: `true` - -This option relaxes matching for leading zeros when when ranges are zero-padded. - -```js -const source = toRegexRange('-0010', '0010'); -const regex = new RegExp(`^${source}$`); -console.log(regex.test('-10')); //=> true -console.log(regex.test('-010')); //=> true -console.log(regex.test('-0010')); //=> true -console.log(regex.test('10')); //=> true -console.log(regex.test('010')); //=> true -console.log(regex.test('0010')); //=> true -``` - -When `relaxZeros` is false, matching is strict: - -```js -const source = toRegexRange('-0010', '0010', { relaxZeros: false }); -const regex = new RegExp(`^${source}$`); -console.log(regex.test('-10')); //=> false -console.log(regex.test('-010')); //=> false -console.log(regex.test('-0010')); //=> true -console.log(regex.test('10')); //=> false -console.log(regex.test('010')); //=> false -console.log(regex.test('0010')); //=> true -``` - -## Examples - -| **Range** | **Result** | **Compile time** | -| --- | --- | --- | -| `toRegexRange(-10, 10)` | `-[1-9]\|-?10\|[0-9]` | _132μs_ | -| `toRegexRange(-100, -10)` | `-1[0-9]\|-[2-9][0-9]\|-100` | _50μs_ | -| `toRegexRange(-100, 100)` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _42μs_ | -| `toRegexRange(001, 100)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|100` | _109μs_ | -| `toRegexRange(001, 555)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _51μs_ | -| `toRegexRange(0010, 1000)` | `0{0,2}1[0-9]\|0{0,2}[2-9][0-9]\|0?[1-9][0-9]{2}\|1000` | _31μs_ | -| `toRegexRange(1, 50)` | `[1-9]\|[1-4][0-9]\|50` | _24μs_ | -| `toRegexRange(1, 55)` | `[1-9]\|[1-4][0-9]\|5[0-5]` | _23μs_ | -| `toRegexRange(1, 555)` | `[1-9]\|[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _30μs_ | -| `toRegexRange(1, 5555)` | `[1-9]\|[1-9][0-9]{1,2}\|[1-4][0-9]{3}\|5[0-4][0-9]{2}\|55[0-4][0-9]\|555[0-5]` | _43μs_ | -| `toRegexRange(111, 555)` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _38μs_ | -| `toRegexRange(29, 51)` | `29\|[34][0-9]\|5[01]` | _24μs_ | -| `toRegexRange(31, 877)` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _32μs_ | -| `toRegexRange(5, 5)` | `5` | _8μs_ | -| `toRegexRange(5, 6)` | `5\|6` | _11μs_ | -| `toRegexRange(1, 2)` | `1\|2` | _6μs_ | -| 
`toRegexRange(1, 5)` | `[1-5]` | _15μs_ | -| `toRegexRange(1, 10)` | `[1-9]\|10` | _22μs_ | -| `toRegexRange(1, 100)` | `[1-9]\|[1-9][0-9]\|100` | _25μs_ | -| `toRegexRange(1, 1000)` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _31μs_ | -| `toRegexRange(1, 10000)` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _34μs_ | -| `toRegexRange(1, 100000)` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _36μs_ | -| `toRegexRange(1, 1000000)` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _42μs_ | -| `toRegexRange(1, 10000000)` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _42μs_ | - -## Heads up! - -**Order of arguments** - -When the `min` is larger than the `max`, values will be flipped to create a valid range: - -```js -toRegexRange('51', '29'); -``` - -Is effectively flipped to: - -```js -toRegexRange('29', '51'); -//=> 29|[3-4][0-9]|5[0-1] -``` - -**Steps / increments** - -This library does not support steps (increments). A pr to add support would be welcome. - -## History - -### v2.0.0 - 2017-04-21 - -**New features** - -Adds support for zero-padding! - -### v1.0.0 - -**Optimizations** - -Repeating ranges are now grouped using quantifiers. rocessing time is roughly the same, but the generated regex is much smaller, which should result in faster matching. - -## Attribution - -Inspired by the python library [range-regex](https://github.com/dimka665/range-regex). - -## About - -
    -Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
    - -
    -Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
    - -
    -Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
    - -### Related projects - -You might also be interested in these projects: - -* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used by micromatch.") -* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") -* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") -* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") -* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 63 | [jonschlinkert](https://github.com/jonschlinkert) | -| 3 | [doowb](https://github.com/doowb) | -| 2 | [realityking](https://github.com/realityking) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! - - - - - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 07, 2019._ \ No newline at end of file diff --git a/node_modules/to-regex-range/index.js b/node_modules/to-regex-range/index.js deleted file mode 100644 index 77fbace..0000000 --- a/node_modules/to-regex-range/index.js +++ /dev/null @@ -1,288 +0,0 @@ -/*! - * to-regex-range - * - * Copyright (c) 2015-present, Jon Schlinkert. - * Released under the MIT License. 
- */ - -'use strict'; - -const isNumber = require('is-number'); - -const toRegexRange = (min, max, options) => { - if (isNumber(min) === false) { - throw new TypeError('toRegexRange: expected the first argument to be a number'); - } - - if (max === void 0 || min === max) { - return String(min); - } - - if (isNumber(max) === false) { - throw new TypeError('toRegexRange: expected the second argument to be a number.'); - } - - let opts = { relaxZeros: true, ...options }; - if (typeof opts.strictZeros === 'boolean') { - opts.relaxZeros = opts.strictZeros === false; - } - - let relax = String(opts.relaxZeros); - let shorthand = String(opts.shorthand); - let capture = String(opts.capture); - let wrap = String(opts.wrap); - let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; - - if (toRegexRange.cache.hasOwnProperty(cacheKey)) { - return toRegexRange.cache[cacheKey].result; - } - - let a = Math.min(min, max); - let b = Math.max(min, max); - - if (Math.abs(a - b) === 1) { - let result = min + '|' + max; - if (opts.capture) { - return `(${result})`; - } - if (opts.wrap === false) { - return result; - } - return `(?:${result})`; - } - - let isPadded = hasPadding(min) || hasPadding(max); - let state = { min, max, a, b }; - let positives = []; - let negatives = []; - - if (isPadded) { - state.isPadded = isPadded; - state.maxLen = String(state.max).length; - } - - if (a < 0) { - let newMin = b < 0 ? Math.abs(b) : 1; - negatives = splitToPatterns(newMin, Math.abs(a), state, opts); - a = state.a = 0; - } - - if (b >= 0) { - positives = splitToPatterns(a, b, state, opts); - } - - state.negatives = negatives; - state.positives = positives; - state.result = collatePatterns(negatives, positives, opts); - - if (opts.capture === true) { - state.result = `(${state.result})`; - } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { - state.result = `(?:${state.result})`; - } - - toRegexRange.cache[cacheKey] = state; - return state.result; -}; - -function collatePatterns(neg, pos, options) { - let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; - let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; - let intersected = filterPatterns(neg, pos, '-?', true, options) || []; - let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); - return subpatterns.join('|'); -} - -function splitToRanges(min, max) { - let nines = 1; - let zeros = 1; - - let stop = countNines(min, nines); - let stops = new Set([max]); - - while (min <= stop && stop <= max) { - stops.add(stop); - nines += 1; - stop = countNines(min, nines); - } - - stop = countZeros(max + 1, zeros) - 1; - - while (min < stop && stop <= max) { - stops.add(stop); - zeros += 1; - stop = countZeros(max + 1, zeros) - 1; - } - - stops = [...stops]; - stops.sort(compare); - return stops; -} - -/** - * Convert a range to a regex pattern - * @param {Number} `start` - * @param {Number} `stop` - * @return {String} - */ - -function rangeToPattern(start, stop, options) { - if (start === stop) { - return { pattern: start, count: [], digits: 0 }; - } - - let zipped = zip(start, stop); - let digits = zipped.length; - let pattern = ''; - let count = 0; - - for (let i = 0; i < digits; i++) { - let [startDigit, stopDigit] = zipped[i]; - - if (startDigit === stopDigit) { - pattern += startDigit; - - } else if (startDigit !== '0' || stopDigit !== '9') { - pattern += toCharacterClass(startDigit, stopDigit, options); - - } else { - count++; - } - } - - if (count) { - pattern += 
options.shorthand === true ? '\\d' : '[0-9]'; - } - - return { pattern, count: [count], digits }; -} - -function splitToPatterns(min, max, tok, options) { - let ranges = splitToRanges(min, max); - let tokens = []; - let start = min; - let prev; - - for (let i = 0; i < ranges.length; i++) { - let max = ranges[i]; - let obj = rangeToPattern(String(start), String(max), options); - let zeros = ''; - - if (!tok.isPadded && prev && prev.pattern === obj.pattern) { - if (prev.count.length > 1) { - prev.count.pop(); - } - - prev.count.push(obj.count[0]); - prev.string = prev.pattern + toQuantifier(prev.count); - start = max + 1; - continue; - } - - if (tok.isPadded) { - zeros = padZeros(max, tok, options); - } - - obj.string = zeros + obj.pattern + toQuantifier(obj.count); - tokens.push(obj); - start = max + 1; - prev = obj; - } - - return tokens; -} - -function filterPatterns(arr, comparison, prefix, intersection, options) { - let result = []; - - for (let ele of arr) { - let { string } = ele; - - // only push if _both_ are negative... - if (!intersection && !contains(comparison, 'string', string)) { - result.push(prefix + string); - } - - // or _both_ are positive - if (intersection && contains(comparison, 'string', string)) { - result.push(prefix + string); - } - } - return result; -} - -/** - * Zip strings - */ - -function zip(a, b) { - let arr = []; - for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); - return arr; -} - -function compare(a, b) { - return a > b ? 1 : b > a ? -1 : 0; -} - -function contains(arr, key, val) { - return arr.some(ele => ele[key] === val); -} - -function countNines(min, len) { - return Number(String(min).slice(0, -len) + '9'.repeat(len)); -} - -function countZeros(integer, zeros) { - return integer - (integer % Math.pow(10, zeros)); -} - -function toQuantifier(digits) { - let [start = 0, stop = ''] = digits; - if (stop || start > 1) { - return `{${start + (stop ? ',' + stop : '')}}`; - } - return ''; -} - -function toCharacterClass(a, b, options) { - return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; -} - -function hasPadding(str) { - return /^-?(0+)\d/.test(str); -} - -function padZeros(value, tok, options) { - if (!tok.isPadded) { - return value; - } - - let diff = Math.abs(tok.maxLen - String(value).length); - let relax = options.relaxZeros !== false; - - switch (diff) { - case 0: - return ''; - case 1: - return relax ? '0?' : '0'; - case 2: - return relax ? '0{0,2}' : '00'; - default: { - return relax ? `0{0,${diff}}` : `0{${diff}}`; - } - } -} - -/** - * Cache - */ - -toRegexRange.cache = {}; -toRegexRange.clearCache = () => (toRegexRange.cache = {}); - -/** - * Expose `toRegexRange` - */ - -module.exports = toRegexRange; diff --git a/node_modules/to-regex-range/package.json b/node_modules/to-regex-range/package.json deleted file mode 100644 index 4ef194f..0000000 --- a/node_modules/to-regex-range/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "name": "to-regex-range", - "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", - "version": "5.0.1", - "homepage": "https://github.com/micromatch/to-regex-range", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Rouven Weßling (www.rouvenwessling.de)" - ], - "repository": "micromatch/to-regex-range", - "bugs": { - "url": "https://github.com/micromatch/to-regex-range/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8.0" - }, - "scripts": { - "test": "mocha" - }, - "dependencies": { - "is-number": "^7.0.0" - }, - "devDependencies": { - "fill-range": "^6.0.0", - "gulp-format-md": "^2.0.0", - "mocha": "^6.0.2", - "text-table": "^0.2.0", - "time-diff": "^0.3.1" - }, - "keywords": [ - "bash", - "date", - "expand", - "expansion", - "expression", - "glob", - "match", - "match date", - "match number", - "match numbers", - "match year", - "matches", - "matching", - "number", - "numbers", - "numerical", - "range", - "ranges", - "regex", - "regexp", - "regular", - "regular expression", - "sequence" - ], - "verb": { - "layout": "default", - "toc": false, - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "helpers": { - "examples": { - "displayName": "examples" - } - }, - "related": { - "list": [ - "expand-range", - "fill-range", - "micromatch", - "repeat-element", - "repeat-string" - ] - } - } -} diff --git a/node_modules/unicorn-magic/default.js b/node_modules/unicorn-magic/default.js deleted file mode 100644 index 7bd4088..0000000 --- a/node_modules/unicorn-magic/default.js +++ /dev/null @@ -1,14 +0,0 @@ -export async function delay({seconds, milliseconds} = {}) { - let duration; - if (typeof seconds === 'number') { - duration = seconds * 1000; - } else if (typeof milliseconds === 'number') { - duration = milliseconds; - } else { - throw new TypeError('Expected an object with either `seconds` or `milliseconds`.'); - } - - return new Promise(resolve => { - setTimeout(resolve, duration); - }); -} diff --git a/node_modules/unicorn-magic/index.d.ts b/node_modules/unicorn-magic/index.d.ts deleted file mode 100644 index 13e0f79..0000000 --- a/node_modules/unicorn-magic/index.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -/** -Delay the promise for the given duration. - -@example -``` -import {delay} from 'unicorn-magic'; - -await delay({seconds: 1}); - -console.log('1 second later'); -``` -*/ -export function delay(duration: {seconds: number} | {milliseconds: number}): Promise; - -/** -Convert a `URL` or path to a path. - -**Not available in browsers.** - -@example -``` -import path from 'node:path'; -import {toPath} from 'unicorn-magic'; - -// `cwd` can be `URL` or a path string. 
-const getUnicornPath = cwd => path.join(toPath(cwd), 'unicorn'); -``` -*/ -export function toPath(urlOrPath: URL | string): string; diff --git a/node_modules/unicorn-magic/license b/node_modules/unicorn-magic/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/unicorn-magic/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/unicorn-magic/node.js b/node_modules/unicorn-magic/node.js deleted file mode 100644 index 35fc81e..0000000 --- a/node_modules/unicorn-magic/node.js +++ /dev/null @@ -1,7 +0,0 @@ -import {fileURLToPath} from 'node:url'; - -export function toPath(urlOrPath) { - return urlOrPath instanceof URL ? fileURLToPath(urlOrPath) : urlOrPath; -} - -export * from './default.js'; diff --git a/node_modules/unicorn-magic/package.json b/node_modules/unicorn-magic/package.json deleted file mode 100644 index 2a3efc6..0000000 --- a/node_modules/unicorn-magic/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "unicorn-magic", - "version": "0.1.0", - "description": "Some useful utilities I often need", - "license": "MIT", - "repository": "sindresorhus/unicorn-magic", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "type": "module", - "exports": { - "types": "./index.d.ts", - "node": "./node.js", - "default": "./default.js" - }, - "sideEffects": false, - "engines": { - "node": ">=18" - }, - "scripts": { - "test": "xo && ava && tsc index.d.ts" - }, - "files": [ - "node.js", - "default.js", - "index.d.ts" - ], - "keywords": [ - "utilities", - "util", - "extras", - "url", - "path", - "delay", - "wait", - "settimeout", - "sleep" - ], - "devDependencies": { - "ava": "^5.3.1", - "in-range": "^3.0.0", - "time-span": "^5.1.0", - "typescript": "^5.2.2", - "xo": "^0.56.0" - } -} diff --git a/node_modules/unicorn-magic/readme.md b/node_modules/unicorn-magic/readme.md deleted file mode 100644 index 633fc40..0000000 --- a/node_modules/unicorn-magic/readme.md +++ /dev/null @@ -1,25 +0,0 @@ -# unicorn-magic - -> Some useful utilities I often need - -*I'm not accepting requests.* - -## Install - -```sh -npm install unicorn-magic -``` - -## Usage - -```js -import {delay} from 'unicorn-magic'; - -await delay({seconds: 1}); - -console.log('1 second later'); -``` - -## API - -See [the types](index.d.ts). 
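Since the readme defers to the type definitions, here is a small, hedged sketch of the two helpers defined in `default.js` and `node.js` above (Node ESM entry point; assumes the package is installed):

```js
// Hedged usage sketch of the unicorn-magic helpers shown above (Node ESM).
import {delay, toPath} from 'unicorn-magic';

// `delay` accepts either {seconds} or {milliseconds} and resolves after that duration.
await delay({milliseconds: 500});

// `toPath` converts a file URL to a filesystem path; plain path strings pass through unchanged.
const here = toPath(new URL('.', import.meta.url));
console.log(here);
```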
diff --git a/node_modules/universalify/LICENSE b/node_modules/universalify/LICENSE deleted file mode 100644 index 514e84e..0000000 --- a/node_modules/universalify/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -(The MIT License) - -Copyright (c) 2017, Ryan Zimmerman - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the 'Software'), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/universalify/README.md b/node_modules/universalify/README.md deleted file mode 100644 index 1184939..0000000 --- a/node_modules/universalify/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# universalify - -![GitHub Workflow Status (branch)](https://img.shields.io/github/actions/workflow/status/RyanZim/universalify/ci.yml?branch=master) -![Coveralls github branch](https://img.shields.io/coveralls/github/RyanZim/universalify/master.svg) -![npm](https://img.shields.io/npm/dm/universalify.svg) -![npm](https://img.shields.io/npm/l/universalify.svg) - -Make a callback- or promise-based function support both promises and callbacks. - -Uses the native promise implementation. - -## Installation - -```bash -npm install universalify -``` - -## API - -### `universalify.fromCallback(fn)` - -Takes a callback-based function to universalify, and returns the universalified function. - -Function must take a callback as the last parameter that will be called with the signature `(error, result)`. `universalify` does not support calling the callback with three or more arguments, and does not ensure that the callback is only called once. - -```js -function callbackFn (n, cb) { - setTimeout(() => cb(null, n), 15) -} - -const fn = universalify.fromCallback(callbackFn) - -// Works with Promises: -fn('Hello World!') -.then(result => console.log(result)) // -> Hello World! -.catch(error => console.error(error)) - -// Works with Callbacks: -fn('Hi!', (error, result) => { - if (error) return console.error(error) - console.log(result) - // -> Hi! -}) -``` - -### `universalify.fromPromise(fn)` - -Takes a promise-based function to universalify, and returns the universalified function. - -Function must return a valid JS promise. `universalify` does not ensure that a valid promise is returned. - -```js -function promiseFn (n) { - return new Promise(resolve => { - setTimeout(() => resolve(n), 15) - }) -} - -const fn = universalify.fromPromise(promiseFn) - -// Works with Promises: -fn('Hello World!') -.then(result => console.log(result)) // -> Hello World! -.catch(error => console.error(error)) - -// Works with Callbacks: -fn('Hi!', (error, result) => { - if (error) return console.error(error) - console.log(result) - // -> Hi! 
-}) -``` - -## License - -MIT diff --git a/node_modules/universalify/index.js b/node_modules/universalify/index.js deleted file mode 100644 index 233beac..0000000 --- a/node_modules/universalify/index.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict' - -exports.fromCallback = function (fn) { - return Object.defineProperty(function (...args) { - if (typeof args[args.length - 1] === 'function') fn.apply(this, args) - else { - return new Promise((resolve, reject) => { - args.push((err, res) => (err != null) ? reject(err) : resolve(res)) - fn.apply(this, args) - }) - } - }, 'name', { value: fn.name }) -} - -exports.fromPromise = function (fn) { - return Object.defineProperty(function (...args) { - const cb = args[args.length - 1] - if (typeof cb !== 'function') return fn.apply(this, args) - else { - args.pop() - fn.apply(this, args).then(r => cb(null, r), cb) - } - }, 'name', { value: fn.name }) -} diff --git a/node_modules/universalify/package.json b/node_modules/universalify/package.json deleted file mode 100644 index d60fccb..0000000 --- a/node_modules/universalify/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "universalify", - "version": "2.0.1", - "description": "Make a callback- or promise-based function support both promises and callbacks.", - "keywords": [ - "callback", - "native", - "promise" - ], - "homepage": "https://github.com/RyanZim/universalify#readme", - "bugs": "https://github.com/RyanZim/universalify/issues", - "license": "MIT", - "author": "Ryan Zimmerman ", - "files": [ - "index.js" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/RyanZim/universalify.git" - }, - "scripts": { - "test": "standard && nyc --reporter text --reporter lcovonly tape test/*.js | colortape" - }, - "devDependencies": { - "colortape": "^0.1.2", - "coveralls": "^3.0.1", - "nyc": "^15.0.0", - "standard": "^14.3.1", - "tape": "^5.0.1" - }, - "engines": { - "node": ">= 10.0.0" - } -} diff --git a/node_modules/wrap-ansi/index.js b/node_modules/wrap-ansi/index.js deleted file mode 100755 index d502255..0000000 --- a/node_modules/wrap-ansi/index.js +++ /dev/null @@ -1,216 +0,0 @@ -'use strict'; -const stringWidth = require('string-width'); -const stripAnsi = require('strip-ansi'); -const ansiStyles = require('ansi-styles'); - -const ESCAPES = new Set([ - '\u001B', - '\u009B' -]); - -const END_CODE = 39; - -const ANSI_ESCAPE_BELL = '\u0007'; -const ANSI_CSI = '['; -const ANSI_OSC = ']'; -const ANSI_SGR_TERMINATOR = 'm'; -const ANSI_ESCAPE_LINK = `${ANSI_OSC}8;;`; - -const wrapAnsi = code => `${ESCAPES.values().next().value}${ANSI_CSI}${code}${ANSI_SGR_TERMINATOR}`; -const wrapAnsiHyperlink = uri => `${ESCAPES.values().next().value}${ANSI_ESCAPE_LINK}${uri}${ANSI_ESCAPE_BELL}`; - -// Calculate the length of words split on ' ', ignoring -// the extra characters added by ansi escape codes -const wordLengths = string => string.split(' ').map(character => stringWidth(character)); - -// Wrap a long word across multiple rows -// Ansi escape codes do not count towards length -const wrapWord = (rows, word, columns) => { - const characters = [...word]; - - let isInsideEscape = false; - let isInsideLinkEscape = false; - let visible = stringWidth(stripAnsi(rows[rows.length - 1])); - - for (const [index, character] of characters.entries()) { - const characterLength = stringWidth(character); - - if (visible + characterLength <= columns) { - rows[rows.length - 1] += character; - } else { - rows.push(character); - visible = 0; - } - - if (ESCAPES.has(character)) { - isInsideEscape = true; - 
isInsideLinkEscape = characters.slice(index + 1).join('').startsWith(ANSI_ESCAPE_LINK); - } - - if (isInsideEscape) { - if (isInsideLinkEscape) { - if (character === ANSI_ESCAPE_BELL) { - isInsideEscape = false; - isInsideLinkEscape = false; - } - } else if (character === ANSI_SGR_TERMINATOR) { - isInsideEscape = false; - } - - continue; - } - - visible += characterLength; - - if (visible === columns && index < characters.length - 1) { - rows.push(''); - visible = 0; - } - } - - // It's possible that the last row we copy over is only - // ansi escape characters, handle this edge-case - if (!visible && rows[rows.length - 1].length > 0 && rows.length > 1) { - rows[rows.length - 2] += rows.pop(); - } -}; - -// Trims spaces from a string ignoring invisible sequences -const stringVisibleTrimSpacesRight = string => { - const words = string.split(' '); - let last = words.length; - - while (last > 0) { - if (stringWidth(words[last - 1]) > 0) { - break; - } - - last--; - } - - if (last === words.length) { - return string; - } - - return words.slice(0, last).join(' ') + words.slice(last).join(''); -}; - -// The wrap-ansi module can be invoked in either 'hard' or 'soft' wrap mode -// -// 'hard' will never allow a string to take up more than columns characters -// -// 'soft' allows long words to expand past the column length -const exec = (string, columns, options = {}) => { - if (options.trim !== false && string.trim() === '') { - return ''; - } - - let returnValue = ''; - let escapeCode; - let escapeUrl; - - const lengths = wordLengths(string); - let rows = ['']; - - for (const [index, word] of string.split(' ').entries()) { - if (options.trim !== false) { - rows[rows.length - 1] = rows[rows.length - 1].trimStart(); - } - - let rowLength = stringWidth(rows[rows.length - 1]); - - if (index !== 0) { - if (rowLength >= columns && (options.wordWrap === false || options.trim === false)) { - // If we start with a new word but the current row length equals the length of the columns, add a new row - rows.push(''); - rowLength = 0; - } - - if (rowLength > 0 || options.trim === false) { - rows[rows.length - 1] += ' '; - rowLength++; - } - } - - // In 'hard' wrap mode, the length of a line is never allowed to extend past 'columns' - if (options.hard && lengths[index] > columns) { - const remainingColumns = (columns - rowLength); - const breaksStartingThisLine = 1 + Math.floor((lengths[index] - remainingColumns - 1) / columns); - const breaksStartingNextLine = Math.floor((lengths[index] - 1) / columns); - if (breaksStartingNextLine < breaksStartingThisLine) { - rows.push(''); - } - - wrapWord(rows, word, columns); - continue; - } - - if (rowLength + lengths[index] > columns && rowLength > 0 && lengths[index] > 0) { - if (options.wordWrap === false && rowLength < columns) { - wrapWord(rows, word, columns); - continue; - } - - rows.push(''); - } - - if (rowLength + lengths[index] > columns && options.wordWrap === false) { - wrapWord(rows, word, columns); - continue; - } - - rows[rows.length - 1] += word; - } - - if (options.trim !== false) { - rows = rows.map(stringVisibleTrimSpacesRight); - } - - const pre = [...rows.join('\n')]; - - for (const [index, character] of pre.entries()) { - returnValue += character; - - if (ESCAPES.has(character)) { - const {groups} = new RegExp(`(?:\\${ANSI_CSI}(?\\d+)m|\\${ANSI_ESCAPE_LINK}(?.*)${ANSI_ESCAPE_BELL})`).exec(pre.slice(index).join('')) || {groups: {}}; - if (groups.code !== undefined) { - const code = Number.parseFloat(groups.code); - escapeCode = code === END_CODE ? 
undefined : code; - } else if (groups.uri !== undefined) { - escapeUrl = groups.uri.length === 0 ? undefined : groups.uri; - } - } - - const code = ansiStyles.codes.get(Number(escapeCode)); - - if (pre[index + 1] === '\n') { - if (escapeUrl) { - returnValue += wrapAnsiHyperlink(''); - } - - if (escapeCode && code) { - returnValue += wrapAnsi(code); - } - } else if (character === '\n') { - if (escapeCode && code) { - returnValue += wrapAnsi(escapeCode); - } - - if (escapeUrl) { - returnValue += wrapAnsiHyperlink(escapeUrl); - } - } - } - - return returnValue; -}; - -// For each newline, invoke the method separately -module.exports = (string, columns, options) => { - return String(string) - .normalize() - .replace(/\r\n/g, '\n') - .split('\n') - .map(line => exec(line, columns, options)) - .join('\n'); -}; diff --git a/node_modules/wrap-ansi/license b/node_modules/wrap-ansi/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/wrap-ansi/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/wrap-ansi/package.json b/node_modules/wrap-ansi/package.json deleted file mode 100644 index dfb2f4f..0000000 --- a/node_modules/wrap-ansi/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "name": "wrap-ansi", - "version": "7.0.0", - "description": "Wordwrap a string with ANSI escape codes", - "license": "MIT", - "repository": "chalk/wrap-ansi", - "funding": "https://github.com/chalk/wrap-ansi?sponsor=1", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=10" - }, - "scripts": { - "test": "xo && nyc ava" - }, - "files": [ - "index.js" - ], - "keywords": [ - "wrap", - "break", - "wordwrap", - "wordbreak", - "linewrap", - "ansi", - "styles", - "color", - "colour", - "colors", - "terminal", - "console", - "cli", - "string", - "tty", - "escape", - "formatting", - "rgb", - "256", - "shell", - "xterm", - "log", - "logging", - "command-line", - "text" - ], - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "devDependencies": { - "ava": "^2.1.0", - "chalk": "^4.0.0", - "coveralls": "^3.0.3", - "has-ansi": "^4.0.0", - "nyc": "^15.0.1", - "xo": "^0.29.1" - } -} diff --git a/node_modules/wrap-ansi/readme.md b/node_modules/wrap-ansi/readme.md deleted file mode 100644 index 68779ba..0000000 --- a/node_modules/wrap-ansi/readme.md +++ /dev/null @@ -1,91 +0,0 @@ -# wrap-ansi [![Build Status](https://travis-ci.com/chalk/wrap-ansi.svg?branch=master)](https://travis-ci.com/chalk/wrap-ansi) [![Coverage Status](https://coveralls.io/repos/github/chalk/wrap-ansi/badge.svg?branch=master)](https://coveralls.io/github/chalk/wrap-ansi?branch=master) - -> Wordwrap a string with [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) - -## Install - -``` -$ npm install wrap-ansi -``` - -## Usage - -```js -const chalk = require('chalk'); -const wrapAnsi = require('wrap-ansi'); - -const input = 'The quick brown ' + chalk.red('fox jumped over ') + - 'the lazy ' + chalk.green('dog and then ran away with the unicorn.'); - -console.log(wrapAnsi(input, 20)); -``` - - - -## API - -### wrapAnsi(string, columns, options?) - -Wrap words to the specified column width. - -#### string - -Type: `string` - -String with ANSI escape codes. Like one styled by [`chalk`](https://github.com/chalk/chalk). Newline characters will be normalized to `\n`. - -#### columns - -Type: `number` - -Number of columns to wrap the text to. - -#### options - -Type: `object` - -##### hard - -Type: `boolean`\ -Default: `false` - -By default the wrap is soft, meaning long words may extend past the column width. Setting this to `true` will make it hard wrap at the column width. - -##### wordWrap - -Type: `boolean`\ -Default: `true` - -By default, an attempt is made to split words at spaces, ensuring that they don't extend past the configured columns. If wordWrap is `false`, each column will instead be completely filled splitting words as necessary. - -##### trim - -Type: `boolean`\ -Default: `true` - -Whitespace on all lines is removed by default. Set this option to `false` if you don't want to trim. 
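To make the `hard`, `wordWrap`, and `trim` options concrete, here is a small, hedged sketch; the module name and call signature follow the `index.js` shown above, and the wrapped output is not reproduced exactly.

```js
// Hedged sketch of the options documented above; wrap-ansi v7 is CommonJS.
const wrapAnsi = require('wrap-ansi');

const text = 'The quick brown fox jumped over the lazy dog';

// Soft wrap (default): long words may extend past the column width.
console.log(wrapAnsi(text, 16));

// Hard wrap: lines never exceed 16 visible columns; over-long words are broken.
console.log(wrapAnsi(text, 16, {hard: true}));

// Disable trimming to preserve leading/trailing whitespace on wrapped lines.
console.log(wrapAnsi('  some indented text  ', 16, {trim: false}));
```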
- -## Related - -- [slice-ansi](https://github.com/chalk/slice-ansi) - Slice a string with ANSI escape codes -- [cli-truncate](https://github.com/sindresorhus/cli-truncate) - Truncate a string to a specific width in the terminal -- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right -- [jsesc](https://github.com/mathiasbynens/jsesc) - Generate ASCII-only output from Unicode strings. Useful for creating test fixtures. - -## Maintainers - -- [Sindre Sorhus](https://github.com/sindresorhus) -- [Josh Junon](https://github.com/qix-) -- [Benjamin Coe](https://github.com/bcoe) - ---- - -
    - - Get professional support for this package with a Tidelift subscription - -
- - Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. -
    -
    diff --git a/node_modules/y18n/CHANGELOG.md b/node_modules/y18n/CHANGELOG.md deleted file mode 100644 index 244d838..0000000 --- a/node_modules/y18n/CHANGELOG.md +++ /dev/null @@ -1,100 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -### [5.0.8](https://www.github.com/yargs/y18n/compare/v5.0.7...v5.0.8) (2021-04-07) - - -### Bug Fixes - -* **deno:** force modern release for Deno ([b1c215a](https://www.github.com/yargs/y18n/commit/b1c215aed714bee5830e76de3e335504dc2c4dab)) - -### [5.0.7](https://www.github.com/yargs/y18n/compare/v5.0.6...v5.0.7) (2021-04-07) - - -### Bug Fixes - -* **deno:** force release for deno ([#121](https://www.github.com/yargs/y18n/issues/121)) ([d3f2560](https://www.github.com/yargs/y18n/commit/d3f2560e6cedf2bfa2352e9eec044da53f9a06b2)) - -### [5.0.6](https://www.github.com/yargs/y18n/compare/v5.0.5...v5.0.6) (2021-04-05) - - -### Bug Fixes - -* **webpack:** skip readFileSync if not defined ([#117](https://www.github.com/yargs/y18n/issues/117)) ([6966fa9](https://www.github.com/yargs/y18n/commit/6966fa91d2881cc6a6c531e836099e01f4da1616)) - -### [5.0.5](https://www.github.com/yargs/y18n/compare/v5.0.4...v5.0.5) (2020-10-25) - - -### Bug Fixes - -* address prototype pollution issue ([#108](https://www.github.com/yargs/y18n/issues/108)) ([a9ac604](https://www.github.com/yargs/y18n/commit/a9ac604abf756dec9687be3843e2c93bfe581f25)) - -### [5.0.4](https://www.github.com/yargs/y18n/compare/v5.0.3...v5.0.4) (2020-10-16) - - -### Bug Fixes - -* **exports:** node 13.0 and 13.1 require the dotted object form _with_ a string fallback ([#105](https://www.github.com/yargs/y18n/issues/105)) ([4f85d80](https://www.github.com/yargs/y18n/commit/4f85d80dbaae6d2c7899ae394f7ad97805df4886)) - -### [5.0.3](https://www.github.com/yargs/y18n/compare/v5.0.2...v5.0.3) (2020-10-16) - - -### Bug Fixes - -* **exports:** node 13.0-13.6 require a string fallback ([#103](https://www.github.com/yargs/y18n/issues/103)) ([e39921e](https://www.github.com/yargs/y18n/commit/e39921e1017f88f5d8ea97ddea854ffe92d68e74)) - -### [5.0.2](https://www.github.com/yargs/y18n/compare/v5.0.1...v5.0.2) (2020-10-01) - - -### Bug Fixes - -* **deno:** update types for deno ^1.4.0 ([#100](https://www.github.com/yargs/y18n/issues/100)) ([3834d9a](https://www.github.com/yargs/y18n/commit/3834d9ab1332f2937c935ada5e76623290efae81)) - -### [5.0.1](https://www.github.com/yargs/y18n/compare/v5.0.0...v5.0.1) (2020-09-05) - - -### Bug Fixes - -* main had old index path ([#98](https://www.github.com/yargs/y18n/issues/98)) ([124f7b0](https://www.github.com/yargs/y18n/commit/124f7b047ba9596bdbdf64459988304e77f3de1b)) - -## [5.0.0](https://www.github.com/yargs/y18n/compare/v4.0.0...v5.0.0) (2020-09-05) - - -### ⚠ BREAKING CHANGES - -* exports maps are now used, which modifies import behavior. -* drops Node 6 and 4. begin following Node.js LTS schedule (#89) - -### Features - -* add support for ESM and Deno [#95](https://www.github.com/yargs/y18n/issues/95)) ([4d7ae94](https://www.github.com/yargs/y18n/commit/4d7ae94bcb42e84164e2180366474b1cd321ed94)) - - -### Build System - -* drops Node 6 and 4. 
begin following Node.js LTS schedule ([#89](https://www.github.com/yargs/y18n/issues/89)) ([3cc0c28](https://www.github.com/yargs/y18n/commit/3cc0c287240727b84eaf1927f903612ec80f5e43)) - -### 4.0.1 (2020-10-25) - - -### Bug Fixes - -* address prototype pollution issue ([#108](https://www.github.com/yargs/y18n/issues/108)) ([a9ac604](https://www.github.com/yargs/y18n/commit/7de58ca0d315990cdb38234e97fc66254cdbcd71)) - -## [4.0.0](https://github.com/yargs/y18n/compare/v3.2.1...v4.0.0) (2017-10-10) - - -### Bug Fixes - -* allow support for falsy values like 0 in tagged literal ([#45](https://github.com/yargs/y18n/issues/45)) ([c926123](https://github.com/yargs/y18n/commit/c926123)) - - -### Features - -* **__:** added tagged template literal support ([#44](https://github.com/yargs/y18n/issues/44)) ([0598daf](https://github.com/yargs/y18n/commit/0598daf)) - - -### BREAKING CHANGES - -* **__:** dropping Node 0.10/Node 0.12 support diff --git a/node_modules/y18n/LICENSE b/node_modules/y18n/LICENSE deleted file mode 100644 index 3c157f0..0000000 --- a/node_modules/y18n/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2015, Contributors - -Permission to use, copy, modify, and/or distribute this software for any purpose -with or without fee is hereby granted, provided that the above copyright notice -and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF -THIS SOFTWARE. diff --git a/node_modules/y18n/README.md b/node_modules/y18n/README.md deleted file mode 100644 index 5102bb1..0000000 --- a/node_modules/y18n/README.md +++ /dev/null @@ -1,127 +0,0 @@ -# y18n - -[![NPM version][npm-image]][npm-url] -[![js-standard-style][standard-image]][standard-url] -[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org) - -The bare-bones internationalization library used by yargs. - -Inspired by [i18n](https://www.npmjs.com/package/i18n). - -## Examples - -_simple string translation:_ - -```js -const __ = require('y18n')().__; - -console.log(__('my awesome string %s', 'foo')); -``` - -output: - -`my awesome string foo` - -_using tagged template literals_ - -```js -const __ = require('y18n')().__; - -const str = 'foo'; - -console.log(__`my awesome string ${str}`); -``` - -output: - -`my awesome string foo` - -_pluralization support:_ - -```js -const __n = require('y18n')().__n; - -console.log(__n('one fish %s', '%d fishes %s', 2, 'foo')); -``` - -output: - -`2 fishes foo` - -## Deno Example - -As of `v5` `y18n` supports [Deno](https://github.com/denoland/deno): - -```typescript -import y18n from "https://deno.land/x/y18n/deno.ts"; - -const __ = y18n({ - locale: 'pirate', - directory: './test/locales' -}).__ - -console.info(__`Hi, ${'Ben'} ${'Coe'}!`) -``` - -You will need to run with `--allow-read` to load alternative locales. - -## JSON Language Files - -The JSON language files should be stored in a `./locales` folder. -File names correspond to locales, e.g., `en.json`, `pirate.json`. 
- -When strings are observed for the first time they will be -added to the JSON file corresponding to the current locale. - -## Methods - -### require('y18n')(config) - -Create an instance of y18n with the config provided, options include: - -* `directory`: the locale directory, default `./locales`. -* `updateFiles`: should newly observed strings be updated in file, default `true`. -* `locale`: what locale should be used. -* `fallbackToLanguage`: should fallback to a language-only file (e.g. `en.json`) - be allowed if a file matching the locale does not exist (e.g. `en_US.json`), - default `true`. - -### y18n.\_\_(str, arg, arg, arg) - -Print a localized string, `%s` will be replaced with `arg`s. - -This function can also be used as a tag for a template literal. You can use it -like this: __`hello ${'world'}`. This will be equivalent to -`__('hello %s', 'world')`. - -### y18n.\_\_n(singularString, pluralString, count, arg, arg, arg) - -Print a localized string with appropriate pluralization. If `%d` is provided -in the string, the `count` will replace this placeholder. - -### y18n.setLocale(str) - -Set the current locale being used. - -### y18n.getLocale() - -What locale is currently being used? - -### y18n.updateLocale(obj) - -Update the current locale with the key value pairs in `obj`. - -## Supported Node.js Versions - -Libraries in this ecosystem make a best effort to track -[Node.js' release schedule](https://nodejs.org/en/about/releases/). Here's [a -post on why we think this is important](https://medium.com/the-node-js-collection/maintainers-should-consider-following-node-js-release-schedule-ab08ed4de71a). - -## License - -ISC - -[npm-url]: https://npmjs.org/package/y18n -[npm-image]: https://img.shields.io/npm/v/y18n.svg -[standard-image]: https://img.shields.io/badge/code%20style-standard-brightgreen.svg -[standard-url]: https://github.com/feross/standard diff --git a/node_modules/y18n/build/index.cjs b/node_modules/y18n/build/index.cjs deleted file mode 100644 index b2731e1..0000000 --- a/node_modules/y18n/build/index.cjs +++ /dev/null @@ -1,203 +0,0 @@ -'use strict'; - -var fs = require('fs'); -var util = require('util'); -var path = require('path'); - -let shim; -class Y18N { - constructor(opts) { - // configurable options. - opts = opts || {}; - this.directory = opts.directory || './locales'; - this.updateFiles = typeof opts.updateFiles === 'boolean' ? opts.updateFiles : true; - this.locale = opts.locale || 'en'; - this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true; - // internal stuff. - this.cache = Object.create(null); - this.writeQueue = []; - } - __(...args) { - if (typeof arguments[0] !== 'string') { - return this._taggedLiteral(arguments[0], ...arguments); - } - const str = args.shift(); - let cb = function () { }; // start with noop. - if (typeof args[args.length - 1] === 'function') - cb = args.pop(); - cb = cb || function () { }; // noop. - if (!this.cache[this.locale]) - this._readLocaleFile(); - // we've observed a new string, update the language file. - if (!this.cache[this.locale][str] && this.updateFiles) { - this.cache[this.locale][str] = str; - // include the current directory and locale, - // since these values could change before the - // write is performed. 
- this._enqueueWrite({ - directory: this.directory, - locale: this.locale, - cb - }); - } - else { - cb(); - } - return shim.format.apply(shim.format, [this.cache[this.locale][str] || str].concat(args)); - } - __n() { - const args = Array.prototype.slice.call(arguments); - const singular = args.shift(); - const plural = args.shift(); - const quantity = args.shift(); - let cb = function () { }; // start with noop. - if (typeof args[args.length - 1] === 'function') - cb = args.pop(); - if (!this.cache[this.locale]) - this._readLocaleFile(); - let str = quantity === 1 ? singular : plural; - if (this.cache[this.locale][singular]) { - const entry = this.cache[this.locale][singular]; - str = entry[quantity === 1 ? 'one' : 'other']; - } - // we've observed a new string, update the language file. - if (!this.cache[this.locale][singular] && this.updateFiles) { - this.cache[this.locale][singular] = { - one: singular, - other: plural - }; - // include the current directory and locale, - // since these values could change before the - // write is performed. - this._enqueueWrite({ - directory: this.directory, - locale: this.locale, - cb - }); - } - else { - cb(); - } - // if a %d placeholder is provided, add quantity - // to the arguments expanded by util.format. - const values = [str]; - if (~str.indexOf('%d')) - values.push(quantity); - return shim.format.apply(shim.format, values.concat(args)); - } - setLocale(locale) { - this.locale = locale; - } - getLocale() { - return this.locale; - } - updateLocale(obj) { - if (!this.cache[this.locale]) - this._readLocaleFile(); - for (const key in obj) { - if (Object.prototype.hasOwnProperty.call(obj, key)) { - this.cache[this.locale][key] = obj[key]; - } - } - } - _taggedLiteral(parts, ...args) { - let str = ''; - parts.forEach(function (part, i) { - const arg = args[i + 1]; - str += part; - if (typeof arg !== 'undefined') { - str += '%s'; - } - }); - return this.__.apply(this, [str].concat([].slice.call(args, 1))); - } - _enqueueWrite(work) { - this.writeQueue.push(work); - if (this.writeQueue.length === 1) - this._processWriteQueue(); - } - _processWriteQueue() { - const _this = this; - const work = this.writeQueue[0]; - // destructure the enqueued work. 
- const directory = work.directory; - const locale = work.locale; - const cb = work.cb; - const languageFile = this._resolveLocaleFile(directory, locale); - const serializedLocale = JSON.stringify(this.cache[locale], null, 2); - shim.fs.writeFile(languageFile, serializedLocale, 'utf-8', function (err) { - _this.writeQueue.shift(); - if (_this.writeQueue.length > 0) - _this._processWriteQueue(); - cb(err); - }); - } - _readLocaleFile() { - let localeLookup = {}; - const languageFile = this._resolveLocaleFile(this.directory, this.locale); - try { - // When using a bundler such as webpack, readFileSync may not be defined: - if (shim.fs.readFileSync) { - localeLookup = JSON.parse(shim.fs.readFileSync(languageFile, 'utf-8')); - } - } - catch (err) { - if (err instanceof SyntaxError) { - err.message = 'syntax error in ' + languageFile; - } - if (err.code === 'ENOENT') - localeLookup = {}; - else - throw err; - } - this.cache[this.locale] = localeLookup; - } - _resolveLocaleFile(directory, locale) { - let file = shim.resolve(directory, './', locale + '.json'); - if (this.fallbackToLanguage && !this._fileExistsSync(file) && ~locale.lastIndexOf('_')) { - // attempt fallback to language only - const languageFile = shim.resolve(directory, './', locale.split('_')[0] + '.json'); - if (this._fileExistsSync(languageFile)) - file = languageFile; - } - return file; - } - _fileExistsSync(file) { - return shim.exists(file); - } -} -function y18n$1(opts, _shim) { - shim = _shim; - const y18n = new Y18N(opts); - return { - __: y18n.__.bind(y18n), - __n: y18n.__n.bind(y18n), - setLocale: y18n.setLocale.bind(y18n), - getLocale: y18n.getLocale.bind(y18n), - updateLocale: y18n.updateLocale.bind(y18n), - locale: y18n.locale - }; -} - -var nodePlatformShim = { - fs: { - readFileSync: fs.readFileSync, - writeFile: fs.writeFile - }, - format: util.format, - resolve: path.resolve, - exists: (file) => { - try { - return fs.statSync(file).isFile(); - } - catch (err) { - return false; - } - } -}; - -const y18n = (opts) => { - return y18n$1(opts, nodePlatformShim); -}; - -module.exports = y18n; diff --git a/node_modules/y18n/build/lib/cjs.js b/node_modules/y18n/build/lib/cjs.js deleted file mode 100644 index ff58470..0000000 --- a/node_modules/y18n/build/lib/cjs.js +++ /dev/null @@ -1,6 +0,0 @@ -import { y18n as _y18n } from './index.js'; -import nodePlatformShim from './platform-shims/node.js'; -const y18n = (opts) => { - return _y18n(opts, nodePlatformShim); -}; -export default y18n; diff --git a/node_modules/y18n/build/lib/index.js b/node_modules/y18n/build/lib/index.js deleted file mode 100644 index e38f335..0000000 --- a/node_modules/y18n/build/lib/index.js +++ /dev/null @@ -1,174 +0,0 @@ -let shim; -class Y18N { - constructor(opts) { - // configurable options. - opts = opts || {}; - this.directory = opts.directory || './locales'; - this.updateFiles = typeof opts.updateFiles === 'boolean' ? opts.updateFiles : true; - this.locale = opts.locale || 'en'; - this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true; - // internal stuff. - this.cache = Object.create(null); - this.writeQueue = []; - } - __(...args) { - if (typeof arguments[0] !== 'string') { - return this._taggedLiteral(arguments[0], ...arguments); - } - const str = args.shift(); - let cb = function () { }; // start with noop. - if (typeof args[args.length - 1] === 'function') - cb = args.pop(); - cb = cb || function () { }; // noop. 
- if (!this.cache[this.locale]) - this._readLocaleFile(); - // we've observed a new string, update the language file. - if (!this.cache[this.locale][str] && this.updateFiles) { - this.cache[this.locale][str] = str; - // include the current directory and locale, - // since these values could change before the - // write is performed. - this._enqueueWrite({ - directory: this.directory, - locale: this.locale, - cb - }); - } - else { - cb(); - } - return shim.format.apply(shim.format, [this.cache[this.locale][str] || str].concat(args)); - } - __n() { - const args = Array.prototype.slice.call(arguments); - const singular = args.shift(); - const plural = args.shift(); - const quantity = args.shift(); - let cb = function () { }; // start with noop. - if (typeof args[args.length - 1] === 'function') - cb = args.pop(); - if (!this.cache[this.locale]) - this._readLocaleFile(); - let str = quantity === 1 ? singular : plural; - if (this.cache[this.locale][singular]) { - const entry = this.cache[this.locale][singular]; - str = entry[quantity === 1 ? 'one' : 'other']; - } - // we've observed a new string, update the language file. - if (!this.cache[this.locale][singular] && this.updateFiles) { - this.cache[this.locale][singular] = { - one: singular, - other: plural - }; - // include the current directory and locale, - // since these values could change before the - // write is performed. - this._enqueueWrite({ - directory: this.directory, - locale: this.locale, - cb - }); - } - else { - cb(); - } - // if a %d placeholder is provided, add quantity - // to the arguments expanded by util.format. - const values = [str]; - if (~str.indexOf('%d')) - values.push(quantity); - return shim.format.apply(shim.format, values.concat(args)); - } - setLocale(locale) { - this.locale = locale; - } - getLocale() { - return this.locale; - } - updateLocale(obj) { - if (!this.cache[this.locale]) - this._readLocaleFile(); - for (const key in obj) { - if (Object.prototype.hasOwnProperty.call(obj, key)) { - this.cache[this.locale][key] = obj[key]; - } - } - } - _taggedLiteral(parts, ...args) { - let str = ''; - parts.forEach(function (part, i) { - const arg = args[i + 1]; - str += part; - if (typeof arg !== 'undefined') { - str += '%s'; - } - }); - return this.__.apply(this, [str].concat([].slice.call(args, 1))); - } - _enqueueWrite(work) { - this.writeQueue.push(work); - if (this.writeQueue.length === 1) - this._processWriteQueue(); - } - _processWriteQueue() { - const _this = this; - const work = this.writeQueue[0]; - // destructure the enqueued work. 
- const directory = work.directory; - const locale = work.locale; - const cb = work.cb; - const languageFile = this._resolveLocaleFile(directory, locale); - const serializedLocale = JSON.stringify(this.cache[locale], null, 2); - shim.fs.writeFile(languageFile, serializedLocale, 'utf-8', function (err) { - _this.writeQueue.shift(); - if (_this.writeQueue.length > 0) - _this._processWriteQueue(); - cb(err); - }); - } - _readLocaleFile() { - let localeLookup = {}; - const languageFile = this._resolveLocaleFile(this.directory, this.locale); - try { - // When using a bundler such as webpack, readFileSync may not be defined: - if (shim.fs.readFileSync) { - localeLookup = JSON.parse(shim.fs.readFileSync(languageFile, 'utf-8')); - } - } - catch (err) { - if (err instanceof SyntaxError) { - err.message = 'syntax error in ' + languageFile; - } - if (err.code === 'ENOENT') - localeLookup = {}; - else - throw err; - } - this.cache[this.locale] = localeLookup; - } - _resolveLocaleFile(directory, locale) { - let file = shim.resolve(directory, './', locale + '.json'); - if (this.fallbackToLanguage && !this._fileExistsSync(file) && ~locale.lastIndexOf('_')) { - // attempt fallback to language only - const languageFile = shim.resolve(directory, './', locale.split('_')[0] + '.json'); - if (this._fileExistsSync(languageFile)) - file = languageFile; - } - return file; - } - _fileExistsSync(file) { - return shim.exists(file); - } -} -export function y18n(opts, _shim) { - shim = _shim; - const y18n = new Y18N(opts); - return { - __: y18n.__.bind(y18n), - __n: y18n.__n.bind(y18n), - setLocale: y18n.setLocale.bind(y18n), - getLocale: y18n.getLocale.bind(y18n), - updateLocale: y18n.updateLocale.bind(y18n), - locale: y18n.locale - }; -} diff --git a/node_modules/y18n/build/lib/platform-shims/node.js b/node_modules/y18n/build/lib/platform-shims/node.js deleted file mode 100644 index 181208b..0000000 --- a/node_modules/y18n/build/lib/platform-shims/node.js +++ /dev/null @@ -1,19 +0,0 @@ -import { readFileSync, statSync, writeFile } from 'fs'; -import { format } from 'util'; -import { resolve } from 'path'; -export default { - fs: { - readFileSync, - writeFile - }, - format, - resolve, - exists: (file) => { - try { - return statSync(file).isFile(); - } - catch (err) { - return false; - } - } -}; diff --git a/node_modules/y18n/index.mjs b/node_modules/y18n/index.mjs deleted file mode 100644 index 46c8213..0000000 --- a/node_modules/y18n/index.mjs +++ /dev/null @@ -1,8 +0,0 @@ -import shim from './build/lib/platform-shims/node.js' -import { y18n as _y18n } from './build/lib/index.js' - -const y18n = (opts) => { - return _y18n(opts, shim) -} - -export default y18n diff --git a/node_modules/y18n/package.json b/node_modules/y18n/package.json deleted file mode 100644 index 4e5c1ca..0000000 --- a/node_modules/y18n/package.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "name": "y18n", - "version": "5.0.8", - "description": "the bare-bones internationalization library used by yargs", - "exports": { - ".": [ - { - "import": "./index.mjs", - "require": "./build/index.cjs" - }, - "./build/index.cjs" - ] - }, - "type": "module", - "module": "./build/lib/index.js", - "keywords": [ - "i18n", - "internationalization", - "yargs" - ], - "homepage": "https://github.com/yargs/y18n", - "bugs": { - "url": "https://github.com/yargs/y18n/issues" - }, - "repository": "yargs/y18n", - "license": "ISC", - "author": "Ben Coe ", - "main": "./build/index.cjs", - "scripts": { - "check": "standardx **/*.ts **/*.cjs **/*.mjs", - "fix": "standardx --fix 
**/*.ts **/*.cjs **/*.mjs", - "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs", - "test": "c8 --reporter=text --reporter=html mocha test/*.cjs", - "test:esm": "c8 --reporter=text --reporter=html mocha test/esm/*.mjs", - "posttest": "npm run check", - "coverage": "c8 report --check-coverage", - "precompile": "rimraf build", - "compile": "tsc", - "postcompile": "npm run build:cjs", - "build:cjs": "rollup -c", - "prepare": "npm run compile" - }, - "devDependencies": { - "@types/node": "^14.6.4", - "@wessberg/rollup-plugin-ts": "^1.3.1", - "c8": "^7.3.0", - "chai": "^4.0.1", - "cross-env": "^7.0.2", - "gts": "^3.0.0", - "mocha": "^8.0.0", - "rimraf": "^3.0.2", - "rollup": "^2.26.10", - "standardx": "^7.0.0", - "ts-transform-default-export": "^1.0.2", - "typescript": "^4.0.0" - }, - "files": [ - "build", - "index.mjs", - "!*.d.ts" - ], - "engines": { - "node": ">=10" - }, - "standardx": { - "ignore": [ - "build" - ] - } -} diff --git a/node_modules/yaml/LICENSE b/node_modules/yaml/LICENSE deleted file mode 100644 index e060aaa..0000000 --- a/node_modules/yaml/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright Eemeli Aro - -Permission to use, copy, modify, and/or distribute this software for any purpose -with or without fee is hereby granted, provided that the above copyright notice -and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF -THIS SOFTWARE. diff --git a/node_modules/yaml/README.md b/node_modules/yaml/README.md deleted file mode 100644 index 8ce1d9a..0000000 --- a/node_modules/yaml/README.md +++ /dev/null @@ -1,169 +0,0 @@ -# YAML - -`yaml` is a definitive library for [YAML](https://yaml.org/), the human friendly data serialization standard. -This library: - -- Supports both YAML 1.1 and YAML 1.2 and all common data schemas, -- Passes all of the [yaml-test-suite](https://github.com/yaml/yaml-test-suite) tests, -- Can accept any string as input without throwing, parsing as much YAML out of it as it can, and -- Supports parsing, modifying, and writing YAML comments and blank lines. - -The library is released under the ISC open source license, and the code is [available on GitHub](https://github.com/eemeli/yaml/). -It has no external dependencies and runs on Node.js as well as modern browsers. - -For the purposes of versioning, any changes that break any of the documented endpoints or APIs will be considered semver-major breaking changes. -Undocumented library internals may change between minor versions, and previous APIs may be deprecated (but not removed). - -The minimum supported TypeScript version of the included typings is 3.9; -for use in earlier versions you may need to set `skipLibCheck: true` in your config. -This requirement may be updated between minor versions of the library. - -For more information, see the project's documentation site: [**eemeli.org/yaml**](https://eemeli.org/yaml/) - -To install: - -```sh -npm install yaml -``` - -**Note:** These docs are for `yaml@2`. 
For v1, see the [v1.10.0 tag](https://github.com/eemeli/yaml/tree/v1.10.0) for the source and [eemeli.org/yaml/v1](https://eemeli.org/yaml/v1/) for the documentation. - -The development and maintenance of this library is [sponsored](https://github.com/sponsors/eemeli) by: - -

- Scipress
- Manifest

-
-## API Overview
-
-The API provided by `yaml` has three layers, depending on how deep you need to go: [Parse & Stringify](https://eemeli.org/yaml/#parse-amp-stringify), [Documents](https://eemeli.org/yaml/#documents), and the underlying [Lexer/Parser/Composer](https://eemeli.org/yaml/#parsing-yaml).
-The first has the simplest API and "just works", the second gets you all the bells and whistles supported by the library along with a decent [AST](https://eemeli.org/yaml/#content-nodes), and the third lets you get progressively closer to YAML source, if that's your thing.
-
-A [command-line tool](https://eemeli.org/yaml/#command-line-tool) is also included.
-
-```js
-import { parse, stringify } from 'yaml'
-// or
-import YAML from 'yaml'
-// or
-const YAML = require('yaml')
-```
-
-### Parse & Stringify
-
-- [`parse(str, reviver?, options?): value`](https://eemeli.org/yaml/#yaml-parse)
-- [`stringify(value, replacer?, options?): string`](https://eemeli.org/yaml/#yaml-stringify)
-
-### Documents
-
-- [`Document`](https://eemeli.org/yaml/#documents)
-  - [`constructor(value, replacer?, options?)`](https://eemeli.org/yaml/#creating-documents)
-  - [`#anchors`](https://eemeli.org/yaml/#working-with-anchors)
-  - [`#contents`](https://eemeli.org/yaml/#content-nodes)
-  - [`#directives`](https://eemeli.org/yaml/#stream-directives)
-  - [`#errors`](https://eemeli.org/yaml/#errors)
-  - [`#warnings`](https://eemeli.org/yaml/#errors)
-- [`isDocument(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`parseAllDocuments(str, options?): Document[]`](https://eemeli.org/yaml/#parsing-documents)
-- [`parseDocument(str, options?): Document`](https://eemeli.org/yaml/#parsing-documents)
-
-### Content Nodes
-
-- [`isAlias(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`isCollection(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`isMap(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`isNode(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`isPair(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`isScalar(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`isSeq(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
-- [`new Scalar(value)`](https://eemeli.org/yaml/#scalar-values)
-- [`new YAMLMap()`](https://eemeli.org/yaml/#collections)
-- [`new YAMLSeq()`](https://eemeli.org/yaml/#collections)
-- [`doc.createAlias(node, name?): Alias`](https://eemeli.org/yaml/#working-with-anchors)
-- [`doc.createNode(value, options?): Node`](https://eemeli.org/yaml/#creating-nodes)
-- [`doc.createPair(key, value): Pair`](https://eemeli.org/yaml/#creating-nodes)
-- [`visit(node, visitor)`](https://eemeli.org/yaml/#finding-and-modifying-nodes)
-
-### Parsing YAML
-
-- [`new Lexer().lex(src)`](https://eemeli.org/yaml/#lexer)
-- [`new Parser(onNewLine?).parse(src)`](https://eemeli.org/yaml/#parser)
-- [`new Composer(options?).compose(tokens)`](https://eemeli.org/yaml/#composer)
-
-## YAML.parse
-
-```yaml
-# file.yml
-YAML:
-  - A human-readable data serialization language
-  - https://en.wikipedia.org/wiki/YAML
-yaml:
-  - A complete JavaScript implementation
-  - https://www.npmjs.com/package/yaml
-```
-
-```js
-import fs from 'fs'
-import YAML from 'yaml'
-
-YAML.parse('3.14159')
-// 3.14159
-
-YAML.parse('[ true, false, maybe, null ]\n')
-// [ true, false, 'maybe', null ]
-
-const file = fs.readFileSync('./file.yml', 'utf8')
-YAML.parse(file)
-// { YAML:
-//     [ 'A human-readable data serialization language',
-//       'https://en.wikipedia.org/wiki/YAML' ],
-//   yaml:
-//     [ 'A complete JavaScript implementation',
-//       'https://www.npmjs.com/package/yaml' ] }
-```
-
-## YAML.stringify
-
-```js
-import YAML from 'yaml'
-
-YAML.stringify(3.14159)
-// '3.14159\n'
-
-YAML.stringify([true, false, 'maybe', null])
-// `- true
-// - false
-// - maybe
-// - null
-// `
-
-YAML.stringify({ number: 3, plain: 'string', block: 'two\nlines\n' })
-// `number: 3
-// plain: string
-// block: |
-//   two
-//   lines
-// `
-```
-
----
-
-Browser testing provided by:
-
-
-BrowserStack
-
diff --git a/node_modules/yaml/bin.mjs b/node_modules/yaml/bin.mjs
deleted file mode 100755
index 7504ae1..0000000
--- a/node_modules/yaml/bin.mjs
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env node
-
-import { UserError, cli, help } from './dist/cli.mjs'
-
-cli(process.stdin, error => {
-  if (error instanceof UserError) {
-    if (error.code === UserError.ARGS) console.error(`${help}\n`)
-    console.error(error.message)
-    process.exitCode = error.code
-  } else if (error) throw error
-})
diff --git a/node_modules/yaml/browser/dist/compose/compose-collection.js b/node_modules/yaml/browser/dist/compose/compose-collection.js
deleted file mode 100644
index b80fba2..0000000
--- a/node_modules/yaml/browser/dist/compose/compose-collection.js
+++ /dev/null
@@ -1,88 +0,0 @@
-import { isNode } from '../nodes/identity.js';
-import { Scalar } from '../nodes/Scalar.js';
-import { YAMLMap } from '../nodes/YAMLMap.js';
-import { YAMLSeq } from '../nodes/YAMLSeq.js';
-import { resolveBlockMap } from './resolve-block-map.js';
-import { resolveBlockSeq } from './resolve-block-seq.js';
-import { resolveFlowCollection } from './resolve-flow-collection.js';
-
-function resolveCollection(CN, ctx, token, onError, tagName, tag) {
-    const coll = token.type === 'block-map'
-        ? resolveBlockMap(CN, ctx, token, onError, tag)
-        : token.type === 'block-seq'
-            ? resolveBlockSeq(CN, ctx, token, onError, tag)
-            : resolveFlowCollection(CN, ctx, token, onError, tag);
-    const Coll = coll.constructor;
-    // If we got a tagName matching the class, or the tag name is '!',
-    // then use the tagName from the node class used to create it.
-    if (tagName === '!' || tagName === Coll.tagName) {
-        coll.tag = Coll.tagName;
-        return coll;
-    }
-    if (tagName)
-        coll.tag = tagName;
-    return coll;
-}
-function composeCollection(CN, ctx, token, props, onError) {
-    const tagToken = props.tag;
-    const tagName = !tagToken
-        ? null
-        : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
-    if (token.type === 'block-seq') {
-        const { anchor, newlineAfterProp: nl } = props;
-        const lastProp = anchor && tagToken
-            ? anchor.offset > tagToken.offset
-                ? anchor
-                : tagToken
-            : (anchor ?? tagToken);
-        if (lastProp && (!nl || nl.offset < lastProp.offset)) {
-            const message = 'Missing newline after block sequence props';
-            onError(lastProp, 'MISSING_CHAR', message);
-        }
-    }
-    const expType = token.type === 'block-map'
-        ? 'map'
-        : token.type === 'block-seq'
-            ? 'seq'
-            : token.start.source === '{'
-                ? 'map'
-                : 'seq';
-    // shortcut: check if it's a generic YAMLMap or YAMLSeq
-    // before jumping into the custom tag logic.
-    if (!tagToken ||
-        !tagName ||
-        tagName === '!' 
|| - (tagName === YAMLMap.tagName && expType === 'map') || - (tagName === YAMLSeq.tagName && expType === 'seq')) { - return resolveCollection(CN, ctx, token, onError, tagName); - } - let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - if (kt?.collection) { - onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true); - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - } - return resolveCollection(CN, ctx, token, onError, tagName); - } - } - const coll = resolveCollection(CN, ctx, token, onError, tagName, tag); - const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll; - const node = isNode(res) - ? res - : new Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -export { composeCollection }; diff --git a/node_modules/yaml/browser/dist/compose/compose-doc.js b/node_modules/yaml/browser/dist/compose/compose-doc.js deleted file mode 100644 index 9827b53..0000000 --- a/node_modules/yaml/browser/dist/compose/compose-doc.js +++ /dev/null @@ -1,43 +0,0 @@ -import { Document } from '../doc/Document.js'; -import { composeNode, composeEmptyNode } from './compose-node.js'; -import { resolveEnd } from './resolve-end.js'; -import { resolveProps } from './resolve-props.js'; - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document(undefined, opts); - const ctx = { - atKey: false, - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps(start, { - indicator: 'doc-start', - next: value ?? end?.[0], - offset, - onError, - parentIndent: 0, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - // @ts-expect-error If Contents is set, let's trust the user - doc.contents = value - ? 
composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -export { composeDoc }; diff --git a/node_modules/yaml/browser/dist/compose/compose-node.js b/node_modules/yaml/browser/dist/compose/compose-node.js deleted file mode 100644 index cdc91b5..0000000 --- a/node_modules/yaml/browser/dist/compose/compose-node.js +++ /dev/null @@ -1,102 +0,0 @@ -import { Alias } from '../nodes/Alias.js'; -import { isScalar } from '../nodes/identity.js'; -import { composeCollection } from './compose-collection.js'; -import { composeScalar } from './compose-scalar.js'; -import { resolveEnd } from './resolve-end.js'; -import { emptyScalarPosition } from './util-empty-scalar-position.js'; - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const atKey = ctx.atKey; - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 'flow-collection': - node = composeCollection(CN, ctx, token, props, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (atKey && - ctx.options.stringKeys && - (!isScalar(node) || - typeof node.value !== 'string' || - (node.tag && node.tag !== 'tag:yaml.org,2002:str'))) { - const msg = 'With stringKeys, all keys must be strings'; - onError(tag ?? 
token, 'NON_STRING_KEY', msg); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) { - const token = { - type: 'scalar', - offset: emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - node.comment = comment; - node.range[2] = end; - } - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -export { composeEmptyNode, composeNode }; diff --git a/node_modules/yaml/browser/dist/compose/compose-scalar.js b/node_modules/yaml/browser/dist/compose/compose-scalar.js deleted file mode 100644 index 13ceda5..0000000 --- a/node_modules/yaml/browser/dist/compose/compose-scalar.js +++ /dev/null @@ -1,86 +0,0 @@ -import { isScalar, SCALAR } from '../nodes/identity.js'; -import { Scalar } from '../nodes/Scalar.js'; -import { resolveBlockScalar } from './resolve-block-scalar.js'; -import { resolveFlowScalar } from './resolve-flow-scalar.js'; - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar(ctx, token, onError) - : resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - let tag; - if (ctx.options.stringKeys && ctx.atKey) { - tag = ctx.schema[SCALAR]; - } - else if (tagName) - tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError); - else if (token.type === 'scalar') - tag = findScalarTagByTest(ctx, value, token, onError); - else - tag = ctx.schema[SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = isScalar(res) ? res : new Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[SCALAR]; -} -function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) && - tag.test?.test(value)) || schema[SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? - schema[SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -export { composeScalar }; diff --git a/node_modules/yaml/browser/dist/compose/composer.js b/node_modules/yaml/browser/dist/compose/composer.js deleted file mode 100644 index 01b387f..0000000 --- a/node_modules/yaml/browser/dist/compose/composer.js +++ /dev/null @@ -1,217 +0,0 @@ -import { Directives } from '../doc/directives.js'; -import { Document } from '../doc/Document.js'; -import { YAMLWarning, YAMLParseError } from '../errors.js'; -import { isCollection, isPair } from '../nodes/identity.js'; -import { composeDoc } from './compose-doc.js'; -import { resolveEnd } from './resolve-end.js'; - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new YAMLWarning(pos, code, message)); - else - this.errors.push(new YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? 
`${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -export { Composer }; diff --git a/node_modules/yaml/browser/dist/compose/resolve-block-map.js b/node_modules/yaml/browser/dist/compose/resolve-block-map.js deleted file mode 100644 index d9b965d..0000000 --- a/node_modules/yaml/browser/dist/compose/resolve-block-map.js +++ /dev/null @@ -1,115 +0,0 @@ -import { Pair } from '../nodes/Pair.js'; -import { YAMLMap } from '../nodes/YAMLMap.js'; -import { resolveProps } from './resolve-props.js'; -import { containsNewline } from './util-contains-newline.js'; -import { flowIndentCheck } from './util-flow-indent-check.js'; -import { mapIncludes } from './util-map-includes.js'; - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) { - const NodeClass = tag?.nodeClass ?? YAMLMap; - const map = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - let commentEnd = null; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? 
sep?.[0], - offset, - onError, - parentIndent: bm.indent, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - commentEnd = keyProps.end; - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.newlineAfterProp || containsNewline(key)) { - onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - ctx.atKey = true; - const keyStart = keyProps.end; - const keyNode = key - ? composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - flowIndentCheck(bm.indent, key, onError); - ctx.atKey = false; - if (mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - parentIndent: bm.indent, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - if (commentEnd && commentEnd < offset) - onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content'); - map.range = [bm.offset, offset, commentEnd ?? 
offset]; - return map; -} - -export { resolveBlockMap }; diff --git a/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js b/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js deleted file mode 100644 index 9b7b7b5..0000000 --- a/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js +++ /dev/null @@ -1,198 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; - -function resolveBlockScalar(ctx, scalar, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? '\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - if (trimIndent === 0 && !ctx.atRoot) { - const message = 'Block scalar values in collections must be indented'; - onError(offset, 'BAD_INDENT', message); - } - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -export { resolveBlockScalar }; diff --git a/node_modules/yaml/browser/dist/compose/resolve-block-seq.js b/node_modules/yaml/browser/dist/compose/resolve-block-seq.js deleted file mode 100644 index 5c9e2ce..0000000 --- a/node_modules/yaml/browser/dist/compose/resolve-block-seq.js +++ /dev/null @@ -1,49 +0,0 @@ -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import { resolveProps } from './resolve-props.js'; -import { flowIndentCheck } from './util-flow-indent-check.js'; - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) { - const NodeClass = tag?.nodeClass ?? YAMLSeq; - const seq = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - if (ctx.atKey) - ctx.atKey = false; - let offset = bs.offset; - let commentEnd = null; - for (const { start, value } of bs.items) { - const props = resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - parentIndent: bs.indent, - startOnNewline: true - }); - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - commentEnd = props.end; - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, start, null, props, onError); - if (ctx.schema.compat) - flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, commentEnd ?? 
offset]; - return seq; -} - -export { resolveBlockSeq }; diff --git a/node_modules/yaml/browser/dist/compose/resolve-end.js b/node_modules/yaml/browser/dist/compose/resolve-end.js deleted file mode 100644 index d5c65d7..0000000 --- a/node_modules/yaml/browser/dist/compose/resolve-end.js +++ /dev/null @@ -1,37 +0,0 @@ -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -export { resolveEnd }; diff --git a/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js b/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js deleted file mode 100644 index 32dc358..0000000 --- a/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js +++ /dev/null @@ -1,207 +0,0 @@ -import { isPair } from '../nodes/identity.js'; -import { Pair } from '../nodes/Pair.js'; -import { YAMLMap } from '../nodes/YAMLMap.js'; -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import { resolveEnd } from './resolve-end.js'; -import { resolveProps } from './resolve-props.js'; -import { containsNewline } from './util-contains-newline.js'; -import { mapIncludes } from './util-map-includes.js'; - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) { - const isMap = fc.start.source === '{'; - const fcName = isMap ? 'flow map' : 'flow sequence'; - const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap : YAMLSeq)); - const coll = new NodeClass(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - if (ctx.atKey) - ctx.atKey = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? 
sep?.[0], - offset, - onError, - parentIndent: fc.indent, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - ctx.atKey = true; - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - ctx.atKey = false; - // value properties - const valueProps = resolveProps(sep ?? [], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - parentIndent: fc.indent, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? 
composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - const endRange = (valueNode ?? keyNode).range; - map.range = [keyNode.range[0], endRange[1], endRange[2]]; - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -export { resolveFlowCollection }; diff --git a/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js b/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js deleted file mode 100644 index 5da8526..0000000 --- a/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js +++ /dev/null @@ -1,223 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import { resolveEnd } from './resolve-end.js'; - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case 
'>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) { - /** - * The negative lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. - * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', // null character - a: '\x07', // bell character - b: '\b', // backspace - e: '\x1b', // escape character - f: '\f', // form feed - n: '\n', // line feed - r: '\r', // carriage return - t: '\t', // horizontal tab - v: '\v', // vertical tab - N: '\u0085', // Unicode next line - _: '\u00a0', // Unicode non-breaking space - L: '\u2028', // Unicode line separator - P: '\u2029', // Unicode paragraph separator - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? 
parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -export { resolveFlowScalar }; diff --git a/node_modules/yaml/browser/dist/compose/resolve-props.js b/node_modules/yaml/browser/dist/compose/resolve-props.js deleted file mode 100644 index dd6ce6c..0000000 --- a/node_modules/yaml/browser/dist/compose/resolve-props.js +++ /dev/null @@ -1,148 +0,0 @@ -function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - let hasNewline = false; - let reqSpace = false; - let tab = null; - let anchor = null; - let tag = null; - let newlineAfterProp = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - if (tab) { - if (atNewline && token.type !== 'comment' && token.type !== 'newline') { - onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - } - tab = null; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. - // In a flow collection, only the parser handles indent. - if (!flow && - (indicator !== 'doc-start' || next?.type !== 'flow-collection') && - token.source.includes('\t')) { - tab = token; - } - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - newlineAfterProp = token; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 
'collection'}`); - found = token; - atNewline = - indicator === 'seq-item-ind' || indicator === 'explicit-key-ind'; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) { - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - } - if (tab && - ((atNewline && tab.indent <= parentIndent) || - next?.type === 'block-map' || - next?.type === 'block-seq')) - onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - anchor, - tag, - newlineAfterProp, - end, - start: start ?? end - }; -} - -export { resolveProps }; diff --git a/node_modules/yaml/browser/dist/compose/util-contains-newline.js b/node_modules/yaml/browser/dist/compose/util-contains-newline.js deleted file mode 100644 index 2d65390..0000000 --- a/node_modules/yaml/browser/dist/compose/util-contains-newline.js +++ /dev/null @@ -1,34 +0,0 @@ -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -export { containsNewline }; diff --git a/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js b/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js deleted file mode 100644 index ab6e0c9..0000000 --- a/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js +++ /dev/null @@ -1,27 +0,0 @@ -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. 
- st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -export { emptyScalarPosition }; diff --git a/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js b/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js deleted file mode 100644 index c20e670..0000000 --- a/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js +++ /dev/null @@ -1,15 +0,0 @@ -import { containsNewline } from './util-contains-newline.js'; - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -export { flowIndentCheck }; diff --git a/node_modules/yaml/browser/dist/compose/util-map-includes.js b/node_modules/yaml/browser/dist/compose/util-map-includes.js deleted file mode 100644 index 48444b6..0000000 --- a/node_modules/yaml/browser/dist/compose/util-map-includes.js +++ /dev/null @@ -1,13 +0,0 @@ -import { isScalar } from '../nodes/identity.js'; - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? uniqueKeys - : (a, b) => a === b || (isScalar(a) && isScalar(b) && a.value === b.value); - return items.some(pair => isEqual(pair.key, search)); -} - -export { mapIncludes }; diff --git a/node_modules/yaml/browser/dist/doc/Document.js b/node_modules/yaml/browser/dist/doc/Document.js deleted file mode 100644 index f1f4278..0000000 --- a/node_modules/yaml/browser/dist/doc/Document.js +++ /dev/null @@ -1,335 +0,0 @@ -import { Alias } from '../nodes/Alias.js'; -import { isEmptyPath, collectionFromPath } from '../nodes/Collection.js'; -import { NODE_TYPE, DOC, isNode, isCollection, isScalar } from '../nodes/identity.js'; -import { Pair } from '../nodes/Pair.js'; -import { toJS } from '../nodes/toJS.js'; -import { Schema } from '../schema/Schema.js'; -import { stringifyDocument } from '../stringify/stringifyDocument.js'; -import { anchorNames, findNewAnchor, createNodeAnchors } from './anchors.js'; -import { applyReviver } from './applyReviver.js'; -import { createNode } from './createNode.js'; -import { Directives } from './directives.js'; - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. 
*/ - this.warnings = []; - Object.defineProperty(this, NODE_TYPE, { value: DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - stringKeys: false, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new Directives({ version }); - this.setSchema(version, options); - // @ts-expect-error We can't really know that this matches Contents. - this.contents = - value === undefined ? null : this.createNode(value, _replacer, options); - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [NODE_TYPE]: { value: DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - // @ts-expect-error We can't really know that this matches Contents. - copy.contents = isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. */ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. - * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name; - } - return new Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? 
{}; - const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode(value, tag, ctx); - if (flow && isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (isEmptyPath(path)) { - if (this.contents == null) - return false; - // @ts-expect-error Presumed impossible if Strict extends false - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - if (isEmptyPath(path)) - return !keepScalar && isScalar(this.contents) - ? this.contents.value - : this.contents; - return isCollection(this.contents) - ? this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. - */ - hasIn(path) { - if (isEmptyPath(path)) - return this.contents !== undefined; - return isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (isEmptyPath(path)) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = value; - } - else if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. 
- this.contents = collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new Directives({ version: '1.1' }); - opt = { resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new Directives({ version }); - opt = { resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. */ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -export { Document }; diff --git a/node_modules/yaml/browser/dist/doc/anchors.js b/node_modules/yaml/browser/dist/doc/anchors.js deleted file mode 100644 index 03c2442..0000000 --- a/node_modules/yaml/browser/dist/doc/anchors.js +++ /dev/null @@ -1,72 +0,0 @@ -import { isScalar, isCollection } from '../nodes/identity.js'; -import { visit } from '../visit.js'; - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. 
- */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (isScalar(ref.node) || isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -export { anchorIsValid, anchorNames, createNodeAnchors, findNewAnchor }; diff --git a/node_modules/yaml/browser/dist/doc/applyReviver.js b/node_modules/yaml/browser/dist/doc/applyReviver.js deleted file mode 100644 index 6e77336..0000000 --- a/node_modules/yaml/browser/dist/doc/applyReviver.js +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. 
- */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - // eslint-disable-next-line @typescript-eslint/no-array-delete - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -export { applyReviver }; diff --git a/node_modules/yaml/browser/dist/doc/createNode.js b/node_modules/yaml/browser/dist/doc/createNode.js deleted file mode 100644 index 1392269..0000000 --- a/node_modules/yaml/browser/dist/doc/createNode.js +++ /dev/null @@ -1,89 +0,0 @@ -import { Alias } from '../nodes/Alias.js'; -import { isNode, isPair, MAP, SEQ, isDocument } from '../nodes/identity.js'; -import { Scalar } from '../nodes/Scalar.js'; - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (isDocument(value)) - value = value.contents; - if (isNode(value)) - return value; - if (isPair(value)) { - const map = ctx.schema[MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. - let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[MAP] - : Symbol.iterator in Object(value) - ? 
schema[SEQ] - : schema[MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? tagObj.createNode(ctx.schema, value, ctx) - : typeof tagObj?.nodeClass?.from === 'function' - ? tagObj.nodeClass.from(ctx.schema, value, ctx) - : new Scalar(value); - if (tagName) - node.tag = tagName; - else if (!tagObj.default) - node.tag = tagObj.tag; - if (ref) - ref.node = node; - return node; -} - -export { createNode }; diff --git a/node_modules/yaml/browser/dist/doc/directives.js b/node_modules/yaml/browser/dist/doc/directives.js deleted file mode 100644 index c66e612..0000000 --- a/node_modules/yaml/browser/dist/doc/directives.js +++ /dev/null @@ -1,176 +0,0 @@ -import { isNode } from '../nodes/identity.js'; -import { visit } from '../visit.js'; - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. 
- */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' || verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) { - try { - return prefix + decodeURIComponent(suffix); - } - catch (error) { - onError(String(error)); - return null; - } - } - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && isNode(doc.contents)) { - const tags = {}; - visit(doc.contents, (_key, node) => { - if (isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' && prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -export { Directives }; diff --git a/node_modules/yaml/browser/dist/errors.js b/node_modules/yaml/browser/dist/errors.js deleted file mode 100644 index ad91290..0000000 --- a/node_modules/yaml/browser/dist/errors.js +++ /dev/null @@ -1,57 +0,0 @@ -class YAMLError extends Error { - constructor(name, pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // 
Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.max(1, Math.min(end.col - col, 80 - ci)); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -export { YAMLError, YAMLParseError, YAMLWarning, prettifyError }; diff --git a/node_modules/yaml/browser/dist/index.js b/node_modules/yaml/browser/dist/index.js deleted file mode 100644 index 097bf24..0000000 --- a/node_modules/yaml/browser/dist/index.js +++ /dev/null @@ -1,17 +0,0 @@ -export { Composer } from './compose/composer.js'; -export { Document } from './doc/Document.js'; -export { Schema } from './schema/Schema.js'; -export { YAMLError, YAMLParseError, YAMLWarning } from './errors.js'; -export { Alias } from './nodes/Alias.js'; -export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/identity.js'; -export { Pair } from './nodes/Pair.js'; -export { Scalar } from './nodes/Scalar.js'; -export { YAMLMap } from './nodes/YAMLMap.js'; -export { YAMLSeq } from './nodes/YAMLSeq.js'; -import * as cst from './parse/cst.js'; -export { cst as CST }; -export { Lexer } from './parse/lexer.js'; -export { LineCounter } from './parse/line-counter.js'; -export { Parser } from './parse/parser.js'; -export { parse, parseAllDocuments, parseDocument, stringify } from './public-api.js'; -export { visit, visitAsync } from './visit.js'; diff --git a/node_modules/yaml/browser/dist/log.js b/node_modules/yaml/browser/dist/log.js deleted file mode 100644 index aa5ba56..0000000 --- a/node_modules/yaml/browser/dist/log.js +++ /dev/null @@ -1,14 +0,0 @@ -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -export { debug, warn }; diff --git a/node_modules/yaml/browser/dist/nodes/Alias.js b/node_modules/yaml/browser/dist/nodes/Alias.js deleted file mode 100644 index 8e34f09..0000000 --- a/node_modules/yaml/browser/dist/nodes/Alias.js +++ /dev/null @@ -1,101 +0,0 @@ -import { anchorIsValid } from '../doc/anchors.js'; -import { visit } from '../visit.js'; -import { ALIAS, isAlias, isCollection, isPair } from './identity.js'; -import { NodeBase } from './Node.js'; -import { toJS } from './toJS.js'; - -class Alias extends NodeBase { - constructor(source) { - super(ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. 
- */ - resolve(doc) { - let found = undefined; - visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - let data = anchors.get(source); - if (!data) { - // Resolve anchors for Node.prototype.toJS() - toJS(source, null, ctx); - data = anchors.get(source); - } - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? anchor.count * anchor.aliasCount : 0; - } - else if (isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -export { Alias }; diff --git a/node_modules/yaml/browser/dist/nodes/Collection.js b/node_modules/yaml/browser/dist/nodes/Collection.js deleted file mode 100644 index 0ebdeda..0000000 --- a/node_modules/yaml/browser/dist/nodes/Collection.js +++ /dev/null @@ -1,147 +0,0 @@ -import { createNode } from '../doc/createNode.js'; -import { isNode, isPair, isCollection, isScalar } from './identity.js'; -import { NodeBase } from './Node.js'; - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). 
-const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. - * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => isNode(it) || isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && isScalar(node) ? node.value : node; - else - return isCollection(node) ? node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); - } - } -} - -export { Collection, collectionFromPath, isEmptyPath }; diff --git a/node_modules/yaml/browser/dist/nodes/Node.js b/node_modules/yaml/browser/dist/nodes/Node.js deleted file mode 100644 index b0eb96b..0000000 --- a/node_modules/yaml/browser/dist/nodes/Node.js +++ /dev/null @@ -1,38 +0,0 @@ -import { applyReviver } from '../doc/applyReviver.js'; -import { NODE_TYPE, isDocument } from './identity.js'; -import { toJS } from './toJS.js'; - -class NodeBase { - constructor(type) { - Object.defineProperty(this, NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** A plain JavaScript representation of this node. */ - toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - if (!isDocument(doc)) - throw new TypeError('A document argument is required'); - const ctx = { - anchors: new Map(), - doc, - keep: true, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS(this, '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver(reviver, { '': res }, '', res) - : res; - } -} - -export { NodeBase }; diff --git a/node_modules/yaml/browser/dist/nodes/Pair.js b/node_modules/yaml/browser/dist/nodes/Pair.js deleted file mode 100644 index 6e419f6..0000000 --- a/node_modules/yaml/browser/dist/nodes/Pair.js +++ /dev/null @@ -1,36 +0,0 @@ -import { createNode } from '../doc/createNode.js'; -import { stringifyPair } from '../stringify/stringifyPair.js'; -import { addPairToJSMap } from './addPairToJSMap.js'; -import { NODE_TYPE, PAIR, isNode } from './identity.js'; - -function createPair(key, value, ctx) { - const k = createNode(key, undefined, ctx); - const v = createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, NODE_TYPE, { value: PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (isNode(key)) - key = key.clone(schema); - if (isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -export { Pair, createPair }; diff --git a/node_modules/yaml/browser/dist/nodes/Scalar.js b/node_modules/yaml/browser/dist/nodes/Scalar.js deleted file mode 100644 index a9f2673..0000000 --- a/node_modules/yaml/browser/dist/nodes/Scalar.js +++ /dev/null @@ -1,24 +0,0 @@ -import { SCALAR } from './identity.js'; -import { NodeBase } from './Node.js'; -import { toJS } from './toJS.js'; - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends NodeBase { - constructor(value) { - super(SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? 
this.value : toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -export { Scalar, isScalarValue }; diff --git a/node_modules/yaml/browser/dist/nodes/YAMLMap.js b/node_modules/yaml/browser/dist/nodes/YAMLMap.js deleted file mode 100644 index 5d88737..0000000 --- a/node_modules/yaml/browser/dist/nodes/YAMLMap.js +++ /dev/null @@ -1,144 +0,0 @@ -import { stringifyCollection } from '../stringify/stringifyCollection.js'; -import { addPairToJSMap } from './addPairToJSMap.js'; -import { Collection } from './Collection.js'; -import { isPair, isScalar, MAP } from './identity.js'; -import { Pair, createPair } from './Pair.js'; -import { isScalarValue } from './Scalar.js'; - -function findPair(items, key) { - const k = isScalar(key) ? key.value : key; - for (const it of items) { - if (isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection { - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - constructor(schema) { - super(MAP, schema); - this.items = []; - } - /** - * A generic collection parsing method that can be extended - * to other node classes that inherit from YAMLMap - */ - static from(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new this(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. - _pair = new Pair(pair, pair?.value); - } - else - _pair = new Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (isScalar(prev.value) && isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && isScalar(node) ? node.value : node) ?? 
undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -export { YAMLMap, findPair }; diff --git a/node_modules/yaml/browser/dist/nodes/YAMLSeq.js b/node_modules/yaml/browser/dist/nodes/YAMLSeq.js deleted file mode 100644 index b80de40..0000000 --- a/node_modules/yaml/browser/dist/nodes/YAMLSeq.js +++ /dev/null @@ -1,113 +0,0 @@ -import { createNode } from '../doc/createNode.js'; -import { stringifyCollection } from '../stringify/stringifyCollection.js'; -import { Collection } from './Collection.js'; -import { SEQ, isScalar } from './identity.js'; -import { isScalarValue } from './Scalar.js'; -import { toJS } from './toJS.js'; - -class YAMLSeq extends Collection { - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - constructor(schema) { - super(SEQ, schema); - this.items = []; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (isScalar(prev) && isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } - static from(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new this(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode(it, undefined, ctx)); - } - } - return seq; - } -} -function asItemIndex(key) { - let idx = isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -export { YAMLSeq }; diff --git a/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js b/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js deleted file mode 100644 index 53f7858..0000000 --- a/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js +++ /dev/null @@ -1,62 +0,0 @@ -import { warn } from '../log.js'; -import { isMergeKey, addMergeToJSMap } from '../schema/yaml-1.1/merge.js'; -import { createStringifyContext } from '../stringify/stringify.js'; -import { isNode } from './identity.js'; -import { toJS } from './toJS.js'; - -function addPairToJSMap(ctx, map, { key, value }) { - if (isNode(key) && key.addToJSMap) - key.addToJSMap(ctx, map, value); - // TODO: Should drop this special case for bare << handling - else if (isMergeKey(ctx, key)) - addMergeToJSMap(ctx, map, value); - else { - const jsKey = toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (isNode(key) && ctx?.doc) { - const strCtx = createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow = true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. 
Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -export { addPairToJSMap }; diff --git a/node_modules/yaml/browser/dist/nodes/identity.js b/node_modules/yaml/browser/dist/nodes/identity.js deleted file mode 100644 index 7b79920..0000000 --- a/node_modules/yaml/browser/dist/nodes/identity.js +++ /dev/null @@ -1,36 +0,0 @@ -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; - -export { ALIAS, DOC, MAP, NODE_TYPE, PAIR, SCALAR, SEQ, hasAnchor, isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq }; diff --git a/node_modules/yaml/browser/dist/nodes/toJS.js b/node_modules/yaml/browser/dist/nodes/toJS.js deleted file mode 100644 index 0ca6250..0000000 --- a/node_modules/yaml/browser/dist/nodes/toJS.js +++ /dev/null @@ -1,37 +0,0 @@ -import { hasAnchor } from './identity.js'; - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. 
- */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -export { toJS }; diff --git a/node_modules/yaml/browser/dist/parse/cst-scalar.js b/node_modules/yaml/browser/dist/parse/cst-scalar.js deleted file mode 100644 index 29ab354..0000000 --- a/node_modules/yaml/browser/dist/parse/cst-scalar.js +++ /dev/null @@ -1,214 +0,0 @@ -import { resolveBlockScalar } from '../compose/resolve-block-scalar.js'; -import { resolveFlowScalar } from '../compose/resolve-flow-scalar.js'; -import { YAMLParseError } from '../errors.js'; -import { stringifyString } from '../stringify/stringifyString.js'; - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar({ options: { strict } }, token, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? 
[ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -export { createScalarToken, resolveAsScalar, setScalarValue }; diff --git a/node_modules/yaml/browser/dist/parse/cst-stringify.js b/node_modules/yaml/browser/dist/parse/cst-stringify.js deleted file mode 100644 index d6ab58c..0000000 --- a/node_modules/yaml/browser/dist/parse/cst-stringify.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -export { stringify }; diff --git a/node_modules/yaml/browser/dist/parse/cst-visit.js b/node_modules/yaml/browser/dist/parse/cst-visit.js deleted file mode 100644 index deca086..0000000 --- a/node_modules/yaml/browser/dist/parse/cst-visit.js +++ /dev/null @@ -1,97 +0,0 @@ -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
-/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. - * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -export { visit }; diff --git a/node_modules/yaml/browser/dist/parse/cst.js b/node_modules/yaml/browser/dist/parse/cst.js deleted file mode 100644 index 8bb2f4a..0000000 --- a/node_modules/yaml/browser/dist/parse/cst.js +++ /dev/null @@ -1,98 +0,0 @@ -export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js'; -export { stringify } from './cst-stringify.js'; -export { visit } from './cst-visit.js'; - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -export { BOM, DOCUMENT, FLOW_END, SCALAR, isCollection, isScalar, prettyToken, tokenType }; diff --git a/node_modules/yaml/browser/dist/parse/lexer.js b/node_modules/yaml/browser/dist/parse/lexer.js deleted file mode 100644 index fbab236..0000000 --- a/node_modules/yaml/browser/dist/parse/lexer.js +++ /dev/null @@ -1,717 +0,0 @@ -import { BOM, DOCUMENT, FLOW_END, SCALAR } from './cst.js'; - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . - [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = new Set('0123456789ABCDEFabcdef'); -const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()"); -const flowIndicatorChars = new Set(',[]{}'); -const invalidAnchorChars = new Set(' ,[]{}\n\r\t'); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. 
Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - if (typeof source !== 'string') - throw TypeError('source is not a string'); - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? 
this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - let cs = line.indexOf('#'); - while (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') { - dirEnd = cs - 1; - break; - } - else { - cs = line.indexOf('#', cs + 1); - } - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return s === '---' ? 'doc' : 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else { - this.indentNext = - this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext); - } - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - // Trailing insufficiently indented tabs are invalid. - // To catch that during parsing, we include them in the block scalar value. 
- let i = nl + 1; - ch = this.buffer[i]; - while (ch === ' ') - ch = this.buffer[++i]; - if (ch === '\t') { - while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n') - ch = this.buffer[++i]; - nl = i - 1; - } - else if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next))) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && flowIndicatorChars.has(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && flowIndicatorChars.has(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? 
i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.has(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.has(this.buffer[i + 1]) && - hexDigits.has(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -export { Lexer }; diff --git a/node_modules/yaml/browser/dist/parse/line-counter.js b/node_modules/yaml/browser/dist/parse/line-counter.js deleted file mode 100644 index 002ce24..0000000 --- a/node_modules/yaml/browser/dist/parse/line-counter.js +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -export { LineCounter }; diff --git a/node_modules/yaml/browser/dist/parse/parser.js b/node_modules/yaml/browser/dist/parse/parser.js deleted file mode 100644 index 0cd7d1a..0000000 --- a/node_modules/yaml/browser/dist/parse/parser.js +++ /dev/null @@ -1,954 +0,0 @@ -import { tokenType } from './cst.js'; -import { Lexer } from './lexer.js'; - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? 
it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... - * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. 
- */ - *next(source) { - this.source = source; - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': - return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? 
top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !it.explicitKey; - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if (findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) - this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = true; - 
this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atMapIndent = !this.onKeyLine && this.indent === map.indent; - const atNextItem = atMapIndent && - (it.sep || it.explicitKey) && - this.type !== 'seq-item-ind'; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !it.explicitKey) { - it.start.push(this.sourceToken); - it.explicitKey = true; - } - else if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start, explicitKey: true }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken], explicitKey: true }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (it.explicitKey) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: 
null, sep: [this.sourceToken] }] - }); - } - } - else if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key; - // @ts-expect-error type guard is wrong here - delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atMapIndent && bv.type !== 'block-seq') { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: [] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = 
this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, explicitKey: true }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -export { Parser }; diff --git a/node_modules/yaml/browser/dist/public-api.js b/node_modules/yaml/browser/dist/public-api.js deleted file mode 100644 index 116f6ee..0000000 --- a/node_modules/yaml/browser/dist/public-api.js +++ /dev/null @@ -1,102 +0,0 @@ -import { Composer } from './compose/composer.js'; -import { Document } from './doc/Document.js'; -import { prettifyError, YAMLParseError } from './errors.js'; -import { warn } from './log.js'; -import { isDocument } from './nodes/identity.js'; -import { LineCounter } from './parse/line-counter.js'; -import { Parser } from './parse/parser.js'; - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null; - return { lineCounter, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. 
In - * TypeScript, you should use `'empty' in docs` as a type guard for it. - */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser = new Parser(lineCounter?.addNewLine); - const composer = new Composer(options); - const docs = Array.from(composer.compose(parser.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(prettifyError(source, lineCounter)); - doc.warnings.forEach(prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser = new Parser(lineCounter?.addNewLine); - const composer = new Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer.compose(parser.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(prettifyError(source, lineCounter)); - doc.warnings.forEach(prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? {}; - if (!keepUndefined) - return undefined; - } - if (isDocument(value) && !_replacer) - return value.toString(options); - return new Document(value, _replacer, options).toString(options); -} - -export { parse, parseAllDocuments, parseDocument, stringify }; diff --git a/node_modules/yaml/browser/dist/schema/Schema.js b/node_modules/yaml/browser/dist/schema/Schema.js deleted file mode 100644 index 60a85c5..0000000 --- a/node_modules/yaml/browser/dist/schema/Schema.js +++ /dev/null @@ -1,37 +0,0 @@ -import { MAP, SCALAR, SEQ } from '../nodes/identity.js'; -import { map } from './common/map.js'; -import { seq } from './common/seq.js'; -import { string } from './common/string.js'; -import { getTags, coreKnownTags } from './tags.js'; - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 
1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? getTags(compat, 'compat') - : compat - ? getTags(null, compat) - : null; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? coreKnownTags : {}; - this.tags = getTags(customTags, this.name, merge); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, MAP, { value: map }); - Object.defineProperty(this, SCALAR, { value: string }); - Object.defineProperty(this, SEQ, { value: seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ? sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -export { Schema }; diff --git a/node_modules/yaml/browser/dist/schema/common/map.js b/node_modules/yaml/browser/dist/schema/common/map.js deleted file mode 100644 index af97b78..0000000 --- a/node_modules/yaml/browser/dist/schema/common/map.js +++ /dev/null @@ -1,17 +0,0 @@ -import { isMap } from '../../nodes/identity.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; - -const map = { - collection: 'map', - default: true, - nodeClass: YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!isMap(map)) - onError('Expected a mapping for this tag'); - return map; - }, - createNode: (schema, obj, ctx) => YAMLMap.from(schema, obj, ctx) -}; - -export { map }; diff --git a/node_modules/yaml/browser/dist/schema/common/null.js b/node_modules/yaml/browser/dist/schema/common/null.js deleted file mode 100644 index fcbe1b7..0000000 --- a/node_modules/yaml/browser/dist/schema/common/null.js +++ /dev/null @@ -1,15 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar(null), - stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? 
source - : ctx.options.nullStr -}; - -export { nullTag }; diff --git a/node_modules/yaml/browser/dist/schema/common/seq.js b/node_modules/yaml/browser/dist/schema/common/seq.js deleted file mode 100644 index 1915b60..0000000 --- a/node_modules/yaml/browser/dist/schema/common/seq.js +++ /dev/null @@ -1,17 +0,0 @@ -import { isSeq } from '../../nodes/identity.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; - -const seq = { - collection: 'seq', - default: true, - nodeClass: YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - }, - createNode: (schema, obj, ctx) => YAMLSeq.from(schema, obj, ctx) -}; - -export { seq }; diff --git a/node_modules/yaml/browser/dist/schema/common/string.js b/node_modules/yaml/browser/dist/schema/common/string.js deleted file mode 100644 index a064f7b..0000000 --- a/node_modules/yaml/browser/dist/schema/common/string.js +++ /dev/null @@ -1,14 +0,0 @@ -import { stringifyString } from '../../stringify/stringifyString.js'; - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -export { string }; diff --git a/node_modules/yaml/browser/dist/schema/core/bool.js b/node_modules/yaml/browser/dist/schema/core/bool.js deleted file mode 100644 index ab3c943..0000000 --- a/node_modules/yaml/browser/dist/schema/core/bool.js +++ /dev/null @@ -1,19 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? ctx.options.trueStr : ctx.options.falseStr; - } -}; - -export { boolTag }; diff --git a/node_modules/yaml/browser/dist/schema/core/float.js b/node_modules/yaml/browser/dist/schema/core/float.js deleted file mode 100644 index 3fa9cf8..0000000 --- a/node_modules/yaml/browser/dist/schema/core/float.js +++ /dev/null @@ -1,43 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber -}; - -export { float, floatExp, floatNaN }; diff --git a/node_modules/yaml/browser/dist/schema/core/int.js b/node_modules/yaml/browser/dist/schema/core/int.js deleted file mode 100644 index 7091235..0000000 --- a/node_modules/yaml/browser/dist/schema/core/int.js +++ /dev/null @@ -1,38 +0,0 @@ -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -export { int, intHex, intOct }; diff --git a/node_modules/yaml/browser/dist/schema/core/schema.js b/node_modules/yaml/browser/dist/schema/core/schema.js deleted file mode 100644 index dd02b2e..0000000 --- a/node_modules/yaml/browser/dist/schema/core/schema.js +++ /dev/null @@ -1,23 +0,0 @@ -import { map } from '../common/map.js'; -import { nullTag } from '../common/null.js'; -import { seq } from '../common/seq.js'; -import { string } from '../common/string.js'; -import { boolTag } from './bool.js'; -import { floatNaN, floatExp, float } from './float.js'; -import { intOct, int, intHex } from './int.js'; - -const schema = [ - map, - seq, - string, - nullTag, - boolTag, - intOct, - int, - intHex, - floatNaN, - floatExp, - float -]; - -export { schema }; diff --git a/node_modules/yaml/browser/dist/schema/json/schema.js b/node_modules/yaml/browser/dist/schema/json/schema.js deleted file mode 100644 index ada1c63..0000000 --- a/node_modules/yaml/browser/dist/schema/json/schema.js +++ /dev/null @@ -1,62 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { map } from '../common/map.js'; -import { seq } from '../common/seq.js'; - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => value == null, - createNode: () => 
new Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^true$|^false$/, - resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map, seq].concat(jsonScalars, jsonError); - -export { schema }; diff --git a/node_modules/yaml/browser/dist/schema/tags.js b/node_modules/yaml/browser/dist/schema/tags.js deleted file mode 100644 index ba52c35..0000000 --- a/node_modules/yaml/browser/dist/schema/tags.js +++ /dev/null @@ -1,96 +0,0 @@ -import { map } from './common/map.js'; -import { nullTag } from './common/null.js'; -import { seq } from './common/seq.js'; -import { string } from './common/string.js'; -import { boolTag } from './core/bool.js'; -import { float, floatExp, floatNaN } from './core/float.js'; -import { int, intHex, intOct } from './core/int.js'; -import { schema } from './core/schema.js'; -import { schema as schema$1 } from './json/schema.js'; -import { binary } from './yaml-1.1/binary.js'; -import { merge } from './yaml-1.1/merge.js'; -import { omap } from './yaml-1.1/omap.js'; -import { pairs } from './yaml-1.1/pairs.js'; -import { schema as schema$2 } from './yaml-1.1/schema.js'; -import { set } from './yaml-1.1/set.js'; -import { timestamp, floatTime, intTime } from './yaml-1.1/timestamp.js'; - -const schemas = new Map([ - ['core', schema], - ['failsafe', [map, seq, string]], - ['json', schema$1], - ['yaml11', schema$2], - ['yaml-1.1', schema$2] -]); -const tagsByName = { - binary, - bool: boolTag, - float, - floatExp, - floatNaN, - floatTime, - int, - intHex, - intOct, - intTime, - map, - merge, - null: nullTag, - omap, - pairs, - seq, - set, - timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary, - 'tag:yaml.org,2002:merge': merge, - 'tag:yaml.org,2002:omap': omap, - 'tag:yaml.org,2002:pairs': pairs, - 'tag:yaml.org,2002:set': set, - 'tag:yaml.org,2002:timestamp': timestamp -}; -function getTags(customTags, schemaName, addMergeTag) { - const schemaTags = schemas.get(schemaName); - if (schemaTags && !customTags) { - return addMergeTag && !schemaTags.includes(merge) - ? 
schemaTags.concat(merge) - : schemaTags.slice(); - } - let tags = schemaTags; - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - if (addMergeTag) - tags = tags.concat(merge); - return tags.reduce((tags, tag) => { - const tagObj = typeof tag === 'string' ? tagsByName[tag] : tag; - if (!tagObj) { - const tagName = JSON.stringify(tag); - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown custom tag ${tagName}; use one of ${keys}`); - } - if (!tags.includes(tagObj)) - tags.push(tagObj); - return tags; - }, []); -} - -export { coreKnownTags, getTags }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js deleted file mode 100644 index 8021a52..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js +++ /dev/null @@ -1,66 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { stringifyString } from '../../stringify/stringifyString.js'; - -const binary = { - identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.BLOCK_LITERAL; - if (type !== Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.BLOCK_LITERAL ? 
'\n' : ' '); - } - return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -export { binary }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js deleted file mode 100644 index 999b59d..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js +++ /dev/null @@ -1,26 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/, - resolve: () => new Scalar(false), - stringify: boolStringify -}; - -export { falseTag, trueTag }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js deleted file mode 100644 index 2f06117..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js +++ /dev/null @@ -1,46 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber -}; - -export { float, floatExp, floatNaN }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js deleted file mode 100644 index f572823..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js +++ /dev/null @@ -1,71 +0,0 @@ -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? '-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -export { int, intBin, intHex, intOct }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/merge.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/merge.js deleted file mode 100644 index d361f36..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/merge.js +++ /dev/null @@ -1,64 +0,0 @@ -import { isScalar, isAlias, isSeq, isMap } from '../../nodes/identity.js'; -import { Scalar } from '../../nodes/Scalar.js'; - -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. 
-// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -const MERGE_KEY = '<<'; -const merge = { - identify: value => value === MERGE_KEY || - (typeof value === 'symbol' && value.description === MERGE_KEY), - default: 'key', - tag: 'tag:yaml.org,2002:merge', - test: /^<<$/, - resolve: () => Object.assign(new Scalar(Symbol(MERGE_KEY)), { - addToJSMap: addMergeToJSMap - }), - stringify: () => MERGE_KEY -}; -const isMergeKey = (ctx, key) => (merge.identify(key) || - (isScalar(key) && - (!key.type || key.type === Scalar.PLAIN) && - merge.identify(key.value))) && - ctx?.doc.schema.tags.some(tag => tag.tag === merge.tag && tag.default); -function addMergeToJSMap(ctx, map, value) { - value = ctx && isAlias(value) ? value.resolve(ctx.doc) : value; - if (isSeq(value)) - for (const it of value.items) - mergeValue(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeValue(ctx, map, it); - else - mergeValue(ctx, map, value); -} -function mergeValue(ctx, map, value) { - const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value; - if (!isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} - -export { addMergeToJSMap, isMergeKey, merge }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js deleted file mode 100644 index 5574ac5..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js +++ /dev/null @@ -1,74 +0,0 @@ -import { isScalar, isPair } from '../../nodes/identity.js'; -import { toJS } from '../../nodes/toJS.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; -import { resolvePairs, createPairs } from './pairs.js'; - -class YAMLOMap extends YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
- */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (isPair(pair)) { - key = toJS(pair.key, '', ctx); - value = toJS(pair.value, key, ctx); - } - else { - key = toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } - static from(schema, iterable, ctx) { - const pairs = createPairs(schema, iterable, ctx); - const omap = new this(); - omap.items = pairs.items; - return omap; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs = resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs.items) { - if (isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs); - }, - createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx) -}; - -export { YAMLOMap, omap }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js deleted file mode 100644 index 18e3020..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js +++ /dev/null @@ -1,78 +0,0 @@ -import { isSeq, isPair, isMap } from '../../nodes/identity.js'; -import { Pair, createPair } from '../../nodes/Pair.js'; -import { Scalar } from '../../nodes/Scalar.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; - -function resolvePairs(seq, onError) { - if (isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (isPair(item)) - continue; - else if (isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair(new Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = isPair(item) ? 
item : new Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else { - throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`); - } - } - else { - key = it; - } - pairs.items.push(createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -export { createPairs, pairs, resolvePairs }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js deleted file mode 100644 index e516ced..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js +++ /dev/null @@ -1,39 +0,0 @@ -import { map } from '../common/map.js'; -import { nullTag } from '../common/null.js'; -import { seq } from '../common/seq.js'; -import { string } from '../common/string.js'; -import { binary } from './binary.js'; -import { trueTag, falseTag } from './bool.js'; -import { floatNaN, floatExp, float } from './float.js'; -import { intBin, intOct, int, intHex } from './int.js'; -import { merge } from './merge.js'; -import { omap } from './omap.js'; -import { pairs } from './pairs.js'; -import { set } from './set.js'; -import { intTime, floatTime, timestamp } from './timestamp.js'; - -const schema = [ - map, - seq, - string, - nullTag, - trueTag, - falseTag, - intBin, - intOct, - int, - intHex, - floatNaN, - floatExp, - float, - binary, - merge, - omap, - pairs, - set, - intTime, - floatTime, - timestamp -]; - -export { schema }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js deleted file mode 100644 index a3cf4ec..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js +++ /dev/null @@ -1,93 +0,0 @@ -import { isMap, isPair, isScalar } from '../../nodes/identity.js'; -import { Pair, createPair } from '../../nodes/Pair.js'; -import { YAMLMap, findPair } from '../../nodes/YAMLMap.js'; - -class YAMLSet extends YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (isPair(key)) - pair = key; - else if (key && - typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair(key.key, null); - else - pair = new Pair(key, null); - const prev = findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = findPair(this.items, key); - return !keepPair && isPair(pair) - ? isScalar(pair.key) - ? 
pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } - static from(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new this(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(createPair(value, null, ctx)); - } - return set; - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx), - resolve(map, onError) { - if (isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - } -}; - -export { YAMLSet, set }; diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js deleted file mode 100644 index 66daec4..0000000 --- a/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js +++ /dev/null @@ -1,101 +0,0 @@ -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. 
- */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => String(n).padStart(2, '0')) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/(T00:00:00)?\.000Z$/, '') -}; - -export { floatTime, intTime, timestamp }; diff --git a/node_modules/yaml/browser/dist/stringify/foldFlowLines.js b/node_modules/yaml/browser/dist/stringify/foldFlowLines.js deleted file mode 100644 index 2f0bd07..0000000 --- a/node_modules/yaml/browser/dist/stringify/foldFlowLines.js +++ /dev/null @@ -1,146 +0,0 @@ -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - if (lineWidth < minContentWidth) - minContentWidth = 0; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i, indent.length); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i, indent.length); - end = i + indent.length + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i, indent) { - let end = i; - let start = i + 1; - let ch = text[start]; - while (ch === ' ' || ch === '\t') { - if (i < start + indent) { - ch = text[++i]; - } - else { - do { - ch = text[++i]; - } while (ch && ch !== '\n'); - end = i; - start = i + 1; - ch = text[start]; - } - } - return end; -} - -export { FOLD_BLOCK, FOLD_FLOW, FOLD_QUOTED, foldFlowLines }; diff --git a/node_modules/yaml/browser/dist/stringify/stringify.js b/node_modules/yaml/browser/dist/stringify/stringify.js deleted file mode 100644 index ad15d8f..0000000 --- a/node_modules/yaml/browser/dist/stringify/stringify.js +++ /dev/null @@ -1,129 +0,0 @@ -import { anchorIsValid } from '../doc/anchors.js'; -import { isPair, isAlias, isNode, isScalar, isCollection } from '../nodes/identity.js'; -import { stringifyComment } from './stringifyComment.js'; -import { stringifyString } from './stringifyString.js'; - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - flowCollectionPadding: true, - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; - break; - default: - inFlow = null; - } - return { - anchors: new Set(), - doc, - flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (isScalar(item)) { - obj = item.value; - let match = tags.filter(t => t.identify?.(obj)); - if (match.length > 1) { - const testMatch = match.filter(t => t.test); - if (testMatch.length > 0) - match = testMatch; - } - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? 
typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (isScalar(node) || isCollection(node)) && node.anchor; - if (anchor && anchorIsValid(anchor)) { - anchors.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : isScalar(node) - ? stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return isScalar(node) || str[0] === '{' || str[0] === '[' - ? `${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -export { createStringifyContext, stringify }; diff --git a/node_modules/yaml/browser/dist/stringify/stringifyCollection.js b/node_modules/yaml/browser/dist/stringify/stringifyCollection.js deleted file mode 100644 index 9019400..0000000 --- a/node_modules/yaml/browser/dist/stringify/stringifyCollection.js +++ /dev/null @@ -1,143 +0,0 @@ -import { isNode, isPair } from '../nodes/identity.js'; -import { stringify } from './stringify.js'; -import { lineComment, indentComment } from './stringifyComment.js'; - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (isPair(item)) { - const ik = isNode(item.key) ? 
item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? `\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) { - const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (isPair(item)) { - const ik = isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik?.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - const { start, end } = flowChars; - if (lines.length === 0) { - return start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth; - } - if (reqNewline) { - let str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - return `${str}\n${indent}${end}`; - } - else { - return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`; - } - } -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -export { stringifyCollection }; diff --git a/node_modules/yaml/browser/dist/stringify/stringifyComment.js b/node_modules/yaml/browser/dist/stringify/stringifyComment.js deleted file mode 100644 index f16fc91..0000000 --- a/node_modules/yaml/browser/dist/stringify/stringifyComment.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Stringifies a comment. 
- * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? '' : ' ') + comment; - -export { indentComment, lineComment, stringifyComment }; diff --git a/node_modules/yaml/browser/dist/stringify/stringifyDocument.js b/node_modules/yaml/browser/dist/stringify/stringifyDocument.js deleted file mode 100644 index 2a9defa..0000000 --- a/node_modules/yaml/browser/dist/stringify/stringifyDocument.js +++ /dev/null @@ -1,85 +0,0 @@ -import { isNode } from '../nodes/identity.js'; -import { createStringifyContext, stringify } from './stringify.js'; -import { indentComment, lineComment } from './stringifyComment.js'; - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? undefined : () => (chompKeep = true); - let body = stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(indentComment(cs, '')); - } - else { - lines.push(`... 
${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -export { stringifyDocument }; diff --git a/node_modules/yaml/browser/dist/stringify/stringifyNumber.js b/node_modules/yaml/browser/dist/stringify/stringifyNumber.js deleted file mode 100644 index 3fa35f9..0000000 --- a/node_modules/yaml/browser/dist/stringify/stringifyNumber.js +++ /dev/null @@ -1,24 +0,0 @@ -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -export { stringifyNumber }; diff --git a/node_modules/yaml/browser/dist/stringify/stringifyPair.js b/node_modules/yaml/browser/dist/stringify/stringifyPair.js deleted file mode 100644 index 353124c..0000000 --- a/node_modules/yaml/browser/dist/stringify/stringifyPair.js +++ /dev/null @@ -1,150 +0,0 @@ -import { isCollection, isNode, isScalar, isSeq } from '../nodes/identity.js'; -import { Scalar } from '../nodes/Scalar.js'; -import { stringify } from './stringify.js'; -import { lineComment, indentComment } from './stringifyComment.js'; - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (isCollection(key) || (!isNode(key) && typeof key === 'object')) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - isCollection(key) || - (isScalar(key) - ? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? 
${str}`; - if (keyComment && !keyCommentDone) { - str += lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += lineComment(str, ctx.indent, commentString(keyComment)); - str = `? ${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += lineComment(str, ctx.indent, commentString(keyComment)); - } - let vsb, vcb, valueComment; - if (isNode(value)) { - vsb = !!value.spaceBefore; - vcb = value.commentBefore; - valueComment = value.comment; - } - else { - vsb = false; - vcb = null; - valueComment = null; - if (value && typeof value === 'object') - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substring(2); - } - let valueCommentDone = false; - const valueStr = stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (keyComment || vsb || vcb) { - ws = vsb ? '\n' : ''; - if (vcb) { - const cs = commentString(vcb); - ws += `\n${indentComment(cs, ctx.indent)}`; - } - if (valueStr === '' && !ctx.inFlow) { - if (ws === '\n') - ws = '\n\n'; - } - else { - ws += `\n${ctx.indent}`; - } - } - else if (!explicitKey && isCollection(value)) { - const vs0 = valueStr[0]; - const nl0 = valueStr.indexOf('\n'); - const hasNewline = nl0 !== -1; - const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0; - if (hasNewline || !flow) { - let hasPropsLine = false; - if (hasNewline && (vs0 === '&' || vs0 === '!')) { - let sp0 = valueStr.indexOf(' '); - if (vs0 === '&' && - sp0 !== -1 && - sp0 < nl0 && - valueStr[sp0 + 1] === '!') { - sp0 = valueStr.indexOf(' ', sp0 + 1); - } - if (sp0 === -1 || nl0 < sp0) - hasPropsLine = true; - } - if (!hasPropsLine) - ws = `\n${ctx.indent}`; - } - } - else if (valueStr === '' || valueStr[0] === '\n') { - ws = ''; - } - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -export { stringifyPair }; diff --git a/node_modules/yaml/browser/dist/stringify/stringifyString.js b/node_modules/yaml/browser/dist/stringify/stringifyString.js deleted file mode 100644 index e19b3d4..0000000 --- a/node_modules/yaml/browser/dist/stringify/stringifyString.js +++ /dev/null @@ -1,337 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import { foldFlowLines, FOLD_QUOTED, FOLD_FLOW, FOLD_BLOCK } from './foldFlowLines.js'; - -const getFoldOptions = (ctx, isBlock) => ({ - indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. 
-const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx, false)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? res - : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx, false)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? 
singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -// The negative lookbehind avoids a polynomial search, -// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind -let blockEndNewlines; -try { - blockEndNewlines = new RegExp('(^|(?\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(blockEndNewlines, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - // Leading | or > is added later - let header = (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (!literal) { - const foldedValue = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - let literalFallback = false; - const foldOptions = getFoldOptions(ctx, true); - if (blockQuote !== 'folded' && type !== Scalar.BLOCK_FOLDED) { - foldOptions.onOverflow = () => { - literalFallback = true; - }; - } - const body = foldFlowLines(`${start}${foldedValue}${end}`, indent, FOLD_BLOCK, foldOptions); - if (!literalFallback) - return `>${header}\n${indent}${body}`; - } - value = value.replace(/\n+/g, `$&${indent}`); - return `|${header}\n${indent}${start}${value}${end}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, indentStep, inFlow } = ctx; - if ((implicitKey && value.includes('\n')) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' - // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? 
quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (containsDocumentMarker(value)) { - if (indent === '') { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - else if (implicitKey && indent === indentStep) { - return quotedString(value, ctx); - } - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. - if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx, false)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.BLOCK_FOLDED: - case Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -export { stringifyString }; diff --git a/node_modules/yaml/browser/dist/util.js b/node_modules/yaml/browser/dist/util.js deleted file mode 100644 index ec59413..0000000 --- a/node_modules/yaml/browser/dist/util.js +++ /dev/null @@ -1,11 +0,0 @@ -export { createNode } from './doc/createNode.js'; -export { debug, warn } from './log.js'; -export { createPair } from './nodes/Pair.js'; -export { findPair } from './nodes/YAMLMap.js'; -export { toJS } from './nodes/toJS.js'; -export { map as mapTag } from './schema/common/map.js'; -export { seq as seqTag } from './schema/common/seq.js'; -export { string as stringTag } from './schema/common/string.js'; -export { foldFlowLines } from './stringify/foldFlowLines.js'; -export { stringifyNumber } from './stringify/stringifyNumber.js'; -export { stringifyString } from './stringify/stringifyString.js'; diff --git a/node_modules/yaml/browser/dist/visit.js b/node_modules/yaml/browser/dist/visit.js deleted file mode 100644 index b5eef41..0000000 --- a/node_modules/yaml/browser/dist/visit.js +++ /dev/null @@ -1,233 +0,0 @@ -import { isDocument, isNode, isPair, 
isCollection, isMap, isSeq, isScalar, isAlias } from './nodes/identity.js'; - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (isNode(ctrl) || isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. 
- * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
-/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (isNode(ctrl) || isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (isMap(node)) - return visitor.Map?.(key, node, path); - if (isSeq(node)) - return visitor.Seq?.(key, node, path); - if (isPair(node)) - return visitor.Pair?.(key, node, path); - if (isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (isCollection(parent)) { - parent.items[key] = node; - } - else if (isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (isDocument(parent)) { - parent.contents = node; - } - else { - const pt = isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -export { visit, visitAsync }; diff --git a/node_modules/yaml/browser/index.js b/node_modules/yaml/browser/index.js deleted file mode 100644 index 5f73271..0000000 --- a/node_modules/yaml/browser/index.js +++ /dev/null @@ -1,5 +0,0 @@ -// `export * as default from ...` fails on Webpack v4 -// https://github.com/eemeli/yaml/issues/228 -import * as YAML from './dist/index.js' -export default YAML -export * from './dist/index.js' diff --git a/node_modules/yaml/browser/package.json b/node_modules/yaml/browser/package.json deleted file mode 100644 index 3dbc1ca..0000000 --- a/node_modules/yaml/browser/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/yaml/dist/cli.d.ts b/node_modules/yaml/dist/cli.d.ts deleted file mode 100644 index 7e2ea2a..0000000 --- a/node_modules/yaml/dist/cli.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -export declare const help = "yaml: A command-line YAML processor and inspector\n\nReads stdin and writes output to stdout and errors & warnings to stderr.\n\nUsage:\n yaml Process a YAML stream, outputting it as YAML\n yaml cst Parse the CST of a YAML stream\n yaml lex Parse the lexical tokens of a YAML stream\n yaml valid Validate a YAML stream, returning 0 on success\n\nOptions:\n --help, -h Show this message.\n --json, -j Output JSON.\n --indent 2 Output pretty-printed data, indented by the given number of spaces.\n\nAdditional options for bare \"yaml\" command:\n --doc, -d Output pretty-printed JS Document objects.\n --single, -1 Require the input to consist of a single YAML document.\n --strict, -s Stop on errors.\n --visit, -v Apply a visitor to each document (requires a path to import)\n --yaml 1.1 Set the YAML version. (default: 1.2)"; -export declare class UserError extends Error { - static ARGS: number; - static SINGLE: number; - code: number; - constructor(code: number, message: string); -} -export declare function cli(stdin: NodeJS.ReadableStream, done: (error?: Error) => void, argv?: string[]): Promise; diff --git a/node_modules/yaml/dist/cli.mjs b/node_modules/yaml/dist/cli.mjs deleted file mode 100644 index d3c7d43..0000000 --- a/node_modules/yaml/dist/cli.mjs +++ /dev/null @@ -1,199 +0,0 @@ -import { resolve } from 'node:path'; -import { parseArgs } from 'node:util'; -import { prettyToken } from './parse/cst.js'; -import { Lexer } from './parse/lexer.js'; -import { Parser } from './parse/parser.js'; -import { Composer } from './compose/composer.js'; -import { LineCounter } from './parse/line-counter.js'; -import { prettifyError } from './errors.js'; -import { visit } from './visit.js'; - -const help = `\ -yaml: A command-line YAML processor and inspector - -Reads stdin and writes output to stdout and errors & warnings to stderr. - -Usage: - yaml Process a YAML stream, outputting it as YAML - yaml cst Parse the CST of a YAML stream - yaml lex Parse the lexical tokens of a YAML stream - yaml valid Validate a YAML stream, returning 0 on success - -Options: - --help, -h Show this message. - --json, -j Output JSON. - --indent 2 Output pretty-printed data, indented by the given number of spaces. - -Additional options for bare "yaml" command: - --doc, -d Output pretty-printed JS Document objects. - --single, -1 Require the input to consist of a single YAML document. - --strict, -s Stop on errors. - --visit, -v Apply a visitor to each document (requires a path to import) - --yaml 1.1 Set the YAML version. 
(default: 1.2)`; -class UserError extends Error { - constructor(code, message) { - super(`Error: ${message}`); - this.code = code; - } -} -UserError.ARGS = 2; -UserError.SINGLE = 3; -async function cli(stdin, done, argv) { - let args; - try { - args = parseArgs({ - args: argv, - allowPositionals: true, - options: { - doc: { type: 'boolean', short: 'd' }, - help: { type: 'boolean', short: 'h' }, - indent: { type: 'string', short: 'i' }, - json: { type: 'boolean', short: 'j' }, - single: { type: 'boolean', short: '1' }, - strict: { type: 'boolean', short: 's' }, - visit: { type: 'string', short: 'v' }, - yaml: { type: 'string', default: '1.2' } - } - }); - } - catch (error) { - return done(new UserError(UserError.ARGS, error.message)); - } - const { positionals: [mode], values: opt } = args; - let indent = Number(opt.indent); - stdin.setEncoding('utf-8'); - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - switch (opt.help || mode) { - /* istanbul ignore next */ - case true: // --help - console.log(help); - break; - case 'lex': { - const lexer = new Lexer(); - const data = []; - const add = (tok) => { - if (opt.json) - data.push(tok); - else - console.log(prettyToken(tok)); - }; - stdin.on('data', (chunk) => { - for (const tok of lexer.lex(chunk, true)) - add(tok); - }); - stdin.on('end', () => { - for (const tok of lexer.lex('', false)) - add(tok); - if (opt.json) - console.log(JSON.stringify(data, null, indent)); - done(); - }); - break; - } - case 'cst': { - const parser = new Parser(); - const data = []; - const add = (tok) => { - if (opt.json) - data.push(tok); - else - console.dir(tok, { depth: null }); - }; - stdin.on('data', (chunk) => { - for (const tok of parser.parse(chunk, true)) - add(tok); - }); - stdin.on('end', () => { - for (const tok of parser.parse('', false)) - add(tok); - if (opt.json) - console.log(JSON.stringify(data, null, indent)); - done(); - }); - break; - } - case undefined: - case 'valid': { - const lineCounter = new LineCounter(); - const parser = new Parser(lineCounter.addNewLine); - // @ts-expect-error Version is validated at runtime - const composer = new Composer({ version: opt.yaml }); - const visitor = opt.visit - ? (await import(resolve(opt.visit))).default - : null; - let source = ''; - let hasDoc = false; - let reqDocEnd = false; - const data = []; - const add = (doc) => { - if (hasDoc && opt.single) { - return done(new UserError(UserError.SINGLE, 'Input stream contains multiple documents')); - } - for (const error of doc.errors) { - prettifyError(source, lineCounter)(error); - if (opt.strict || mode === 'valid') - return done(error); - console.error(error); - } - for (const warning of doc.warnings) { - prettifyError(source, lineCounter)(warning); - console.error(warning); - } - if (visitor) - visit(doc, visitor); - if (mode === 'valid') - doc.toJS(); - else if (opt.json) - data.push(doc); - else if (opt.doc) { - Object.defineProperties(doc, { - options: { enumerable: false }, - schema: { enumerable: false } - }); - console.dir(doc, { depth: null }); - } - else { - if (reqDocEnd) - console.log('...'); - try { - indent || (indent = 2); - const str = doc.toString({ indent }); - console.log(str.endsWith('\n') ? 
str.slice(0, -1) : str); - } - catch (error) { - done(error); - } - } - hasDoc = true; - reqDocEnd = !doc.directives?.docEnd; - }; - stdin.on('data', (chunk) => { - source += chunk; - for (const tok of parser.parse(chunk, true)) { - for (const doc of composer.next(tok)) - add(doc); - } - }); - stdin.on('end', () => { - for (const tok of parser.parse('', false)) { - for (const doc of composer.next(tok)) - add(doc); - } - for (const doc of composer.end(false)) - add(doc); - if (opt.single && !hasDoc) { - return done(new UserError(UserError.SINGLE, 'Input stream contained no documents')); - } - if (mode !== 'valid' && opt.json) { - console.log(JSON.stringify(opt.single ? data[0] : data, null, indent)); - } - done(); - }); - break; - } - default: - done(new UserError(UserError.ARGS, `Unknown command: ${JSON.stringify(mode)}`)); - } -} - -export { UserError, cli, help }; diff --git a/node_modules/yaml/dist/compose/compose-collection.d.ts b/node_modules/yaml/dist/compose/compose-collection.d.ts deleted file mode 100644 index 22fe516..0000000 --- a/node_modules/yaml/dist/compose/compose-collection.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { ParsedNode } from '../nodes/Node.js'; -import type { BlockMap, BlockSequence, FlowCollection, SourceToken } from '../parse/cst.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -interface Props { - anchor: SourceToken | null; - tag: SourceToken | null; - newlineAfterProp: SourceToken | null; -} -export declare function composeCollection(CN: ComposeNode, ctx: ComposeContext, token: BlockMap | BlockSequence | FlowCollection, props: Props, onError: ComposeErrorHandler): ParsedNode; -export {}; diff --git a/node_modules/yaml/dist/compose/compose-collection.js b/node_modules/yaml/dist/compose/compose-collection.js deleted file mode 100644 index 3f91468..0000000 --- a/node_modules/yaml/dist/compose/compose-collection.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); -var YAMLMap = require('../nodes/YAMLMap.js'); -var YAMLSeq = require('../nodes/YAMLSeq.js'); -var resolveBlockMap = require('./resolve-block-map.js'); -var resolveBlockSeq = require('./resolve-block-seq.js'); -var resolveFlowCollection = require('./resolve-flow-collection.js'); - -function resolveCollection(CN, ctx, token, onError, tagName, tag) { - const coll = token.type === 'block-map' - ? resolveBlockMap.resolveBlockMap(CN, ctx, token, onError, tag) - : token.type === 'block-seq' - ? resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError, tag) - : resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError, tag); - const Coll = coll.constructor; - // If we got a tagName matching the class, or the tag name is '!', - // then use the tagName from the node class used to create it. - if (tagName === '!' || tagName === Coll.tagName) { - coll.tag = Coll.tagName; - return coll; - } - if (tagName) - coll.tag = tagName; - return coll; -} -function composeCollection(CN, ctx, token, props, onError) { - const tagToken = props.tag; - const tagName = !tagToken - ? null - : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); - if (token.type === 'block-seq') { - const { anchor, newlineAfterProp: nl } = props; - const lastProp = anchor && tagToken - ? anchor.offset > tagToken.offset - ? anchor - : tagToken - : (anchor ?? 
tagToken); - if (lastProp && (!nl || nl.offset < lastProp.offset)) { - const message = 'Missing newline after block sequence props'; - onError(lastProp, 'MISSING_CHAR', message); - } - } - const expType = token.type === 'block-map' - ? 'map' - : token.type === 'block-seq' - ? 'seq' - : token.start.source === '{' - ? 'map' - : 'seq'; - // shortcut: check if it's a generic YAMLMap or YAMLSeq - // before jumping into the custom tag logic. - if (!tagToken || - !tagName || - tagName === '!' || - (tagName === YAMLMap.YAMLMap.tagName && expType === 'map') || - (tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq')) { - return resolveCollection(CN, ctx, token, onError, tagName); - } - let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - if (kt?.collection) { - onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true); - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - } - return resolveCollection(CN, ctx, token, onError, tagName); - } - } - const coll = resolveCollection(CN, ctx, token, onError, tagName, tag); - const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll; - const node = identity.isNode(res) - ? res - : new Scalar.Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -exports.composeCollection = composeCollection; diff --git a/node_modules/yaml/dist/compose/compose-doc.d.ts b/node_modules/yaml/dist/compose/compose-doc.d.ts deleted file mode 100644 index 05816a7..0000000 --- a/node_modules/yaml/dist/compose/compose-doc.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { Directives } from '../doc/directives.js'; -import { Document } from '../doc/Document.js'; -import type { ParsedNode } from '../nodes/Node.js'; -import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js'; -import type * as CST from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function composeDoc(options: ParseOptions & DocumentOptions & SchemaOptions, directives: Directives, { offset, start, value, end }: CST.Document, onError: ComposeErrorHandler): Document.Parsed; diff --git a/node_modules/yaml/dist/compose/compose-doc.js b/node_modules/yaml/dist/compose/compose-doc.js deleted file mode 100644 index 63c9495..0000000 --- a/node_modules/yaml/dist/compose/compose-doc.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -var Document = require('../doc/Document.js'); -var composeNode = require('./compose-node.js'); -var resolveEnd = require('./resolve-end.js'); -var resolveProps = require('./resolve-props.js'); - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document.Document(undefined, opts); - const ctx = { - atKey: false, - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps.resolveProps(start, { - indicator: 'doc-start', - next: value ?? 
end?.[0], - offset, - onError, - parentIndent: 0, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - // @ts-expect-error If Contents is set, let's trust the user - doc.contents = value - ? composeNode.composeNode(ctx, value, props, onError) - : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -exports.composeDoc = composeDoc; diff --git a/node_modules/yaml/dist/compose/compose-node.d.ts b/node_modules/yaml/dist/compose/compose-node.d.ts deleted file mode 100644 index 5de1f38..0000000 --- a/node_modules/yaml/dist/compose/compose-node.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type { Directives } from '../doc/directives.js'; -import type { ParsedNode } from '../nodes/Node.js'; -import type { ParseOptions } from '../options.js'; -import type { SourceToken, Token } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { ComposeErrorHandler } from './composer.js'; -export interface ComposeContext { - atKey: boolean; - atRoot: boolean; - directives: Directives; - options: Readonly>>; - schema: Readonly; -} -interface Props { - spaceBefore: boolean; - comment: string; - anchor: SourceToken | null; - tag: SourceToken | null; - newlineAfterProp: SourceToken | null; - end: number; -} -declare const CN: { - composeNode: typeof composeNode; - composeEmptyNode: typeof composeEmptyNode; -}; -export type ComposeNode = typeof CN; -export declare function composeNode(ctx: ComposeContext, token: Token, props: Props, onError: ComposeErrorHandler): ParsedNode; -export declare function composeEmptyNode(ctx: ComposeContext, offset: number, before: Token[] | undefined, pos: number | null, { spaceBefore, comment, anchor, tag, end }: Props, onError: ComposeErrorHandler): import("../index.js").Scalar.Parsed; -export {}; diff --git a/node_modules/yaml/dist/compose/compose-node.js b/node_modules/yaml/dist/compose/compose-node.js deleted file mode 100644 index c5f4d8b..0000000 --- a/node_modules/yaml/dist/compose/compose-node.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; - -var Alias = require('../nodes/Alias.js'); -var identity = require('../nodes/identity.js'); -var composeCollection = require('./compose-collection.js'); -var composeScalar = require('./compose-scalar.js'); -var resolveEnd = require('./resolve-end.js'); -var utilEmptyScalarPosition = require('./util-empty-scalar-position.js'); - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const atKey = ctx.atKey; - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 
'flow-collection': - node = composeCollection.composeCollection(CN, ctx, token, props, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (atKey && - ctx.options.stringKeys && - (!identity.isScalar(node) || - typeof node.value !== 'string' || - (node.tag && node.tag !== 'tag:yaml.org,2002:str'))) { - const msg = 'With stringKeys, all keys must be strings'; - onError(tag ?? token, 'NON_STRING_KEY', msg); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) { - const token = { - type: 'scalar', - offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - node.comment = comment; - node.range[2] = end; - } - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias.Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -exports.composeEmptyNode = composeEmptyNode; -exports.composeNode = composeNode; diff --git a/node_modules/yaml/dist/compose/compose-scalar.d.ts b/node_modules/yaml/dist/compose/compose-scalar.d.ts deleted file mode 100644 index d5d0f79..0000000 --- a/node_modules/yaml/dist/compose/compose-scalar.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import type { BlockScalar, FlowScalar, SourceToken } from '../parse/cst.js'; -import type { ComposeContext } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function composeScalar(ctx: ComposeContext, token: FlowScalar | BlockScalar, tagToken: SourceToken | null, onError: ComposeErrorHandler): Scalar.Parsed; diff --git a/node_modules/yaml/dist/compose/compose-scalar.js b/node_modules/yaml/dist/compose/compose-scalar.js deleted file mode 100644 index 7fc7ed4..0000000 --- a/node_modules/yaml/dist/compose/compose-scalar.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); -var resolveBlockScalar = require('./resolve-block-scalar.js'); -var resolveFlowScalar = 
require('./resolve-flow-scalar.js'); - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar.resolveBlockScalar(ctx, token, onError) - : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - let tag; - if (ctx.options.stringKeys && ctx.atKey) { - tag = ctx.schema[identity.SCALAR]; - } - else if (tagName) - tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError); - else if (token.type === 'scalar') - tag = findScalarTagByTest(ctx, value, token, onError); - else - tag = ctx.schema[identity.SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = identity.isScalar(res) ? res : new Scalar.Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar.Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[identity.SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[identity.SCALAR]; -} -function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) && - tag.test?.test(value)) || schema[identity.SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? 
- schema[identity.SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -exports.composeScalar = composeScalar; diff --git a/node_modules/yaml/dist/compose/composer.d.ts b/node_modules/yaml/dist/compose/composer.d.ts deleted file mode 100644 index cd3d323..0000000 --- a/node_modules/yaml/dist/compose/composer.d.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { Directives } from '../doc/directives.js'; -import { Document } from '../doc/Document.js'; -import { ErrorCode, YAMLParseError, YAMLWarning } from '../errors.js'; -import type { ParsedNode, Range } from '../nodes/Node.js'; -import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js'; -import type { Token } from '../parse/cst.js'; -type ErrorSource = number | [number, number] | Range | { - offset: number; - source?: string; -}; -export type ComposeErrorHandler = (source: ErrorSource, code: ErrorCode, message: string, warning?: boolean) => void; -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... - * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -export declare class Composer { - private directives; - private doc; - private options; - private atDirectives; - private prelude; - private errors; - private warnings; - constructor(options?: ParseOptions & DocumentOptions & SchemaOptions); - private onError; - private decorate; - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo(): { - comment: string; - directives: Directives; - errors: YAMLParseError[]; - warnings: YAMLWarning[]; - }; - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - compose(tokens: Iterable, forceDoc?: boolean, endOffset?: number): Generator, void, unknown>; - /** Advance the composer by one CST token. */ - next(token: Token): Generator, void, unknown>; - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. 
- */ - end(forceDoc?: boolean, endOffset?: number): Generator, void, unknown>; -} -export {}; diff --git a/node_modules/yaml/dist/compose/composer.js b/node_modules/yaml/dist/compose/composer.js deleted file mode 100644 index 0b89117..0000000 --- a/node_modules/yaml/dist/compose/composer.js +++ /dev/null @@ -1,221 +0,0 @@ -'use strict'; - -var directives = require('../doc/directives.js'); -var Document = require('../doc/Document.js'); -var errors = require('../errors.js'); -var identity = require('../nodes/identity.js'); -var composeDoc = require('./compose-doc.js'); -var resolveEnd = require('./resolve-end.js'); - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... - * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new errors.YAMLWarning(pos, code, message)); - else - this.errors.push(new errors.YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new directives.Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (identity.isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (identity.isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. 
- */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - if (process.env.LOG_STREAM) - console.dir(token, { depth: null }); - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? `${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. 
- */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document.Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -exports.Composer = Composer; diff --git a/node_modules/yaml/dist/compose/resolve-block-map.d.ts b/node_modules/yaml/dist/compose/resolve-block-map.d.ts deleted file mode 100644 index cf82dee..0000000 --- a/node_modules/yaml/dist/compose/resolve-block-map.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { YAMLMap } from '../nodes/YAMLMap.js'; -import type { BlockMap } from '../parse/cst.js'; -import { CollectionTag } from '../schema/types.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveBlockMap({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed; diff --git a/node_modules/yaml/dist/compose/resolve-block-map.js b/node_modules/yaml/dist/compose/resolve-block-map.js deleted file mode 100644 index f0d9727..0000000 --- a/node_modules/yaml/dist/compose/resolve-block-map.js +++ /dev/null @@ -1,117 +0,0 @@ -'use strict'; - -var Pair = require('../nodes/Pair.js'); -var YAMLMap = require('../nodes/YAMLMap.js'); -var resolveProps = require('./resolve-props.js'); -var utilContainsNewline = require('./util-contains-newline.js'); -var utilFlowIndentCheck = require('./util-flow-indent-check.js'); -var utilMapIncludes = require('./util-map-includes.js'); - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) { - const NodeClass = tag?.nodeClass ?? YAMLMap.YAMLMap; - const map = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - let commentEnd = null; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps.resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - parentIndent: bm.indent, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - commentEnd = keyProps.end; - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.newlineAfterProp || utilContainsNewline.containsNewline(key)) { - onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - ctx.atKey = true; - const keyStart = keyProps.end; - const keyNode = key - ? 
composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); - ctx.atKey = false; - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - parentIndent: bm.indent, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - if (commentEnd && commentEnd < offset) - onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content'); - map.range = [bm.offset, offset, commentEnd ?? offset]; - return map; -} - -exports.resolveBlockMap = resolveBlockMap; diff --git a/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts b/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts deleted file mode 100644 index 6d22901..0000000 --- a/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Range } from '../nodes/Node.js'; -import { Scalar } from '../nodes/Scalar.js'; -import type { BlockScalar } from '../parse/cst.js'; -import type { ComposeContext } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveBlockScalar(ctx: ComposeContext, scalar: BlockScalar, onError: ComposeErrorHandler): { - value: string; - type: Scalar.BLOCK_FOLDED | Scalar.BLOCK_LITERAL | null; - comment: string; - range: Range; -}; diff --git a/node_modules/yaml/dist/compose/resolve-block-scalar.js b/node_modules/yaml/dist/compose/resolve-block-scalar.js deleted file mode 100644 index 97eaf2b..0000000 --- a/node_modules/yaml/dist/compose/resolve-block-scalar.js +++ /dev/null @@ -1,200 +0,0 @@ -'use strict'; - -var Scalar = require('../nodes/Scalar.js'); - -function resolveBlockScalar(ctx, scalar, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? 
Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? '\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - if (trimIndent === 0 && !ctx.atRoot) { - const message = 'Block scalar values in collections must be indented'; - onError(offset, 'BAD_INDENT', message); - } - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -exports.resolveBlockScalar = resolveBlockScalar; diff --git a/node_modules/yaml/dist/compose/resolve-block-seq.d.ts b/node_modules/yaml/dist/compose/resolve-block-seq.d.ts deleted file mode 100644 index 139ad25..0000000 --- a/node_modules/yaml/dist/compose/resolve-block-seq.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { BlockSequence } from '../parse/cst.js'; -import { CollectionTag } from '../schema/types.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveBlockSeq({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLSeq.Parsed; diff --git a/node_modules/yaml/dist/compose/resolve-block-seq.js b/node_modules/yaml/dist/compose/resolve-block-seq.js deleted file mode 100644 index 8787fd7..0000000 --- a/node_modules/yaml/dist/compose/resolve-block-seq.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict'; - -var YAMLSeq = require('../nodes/YAMLSeq.js'); -var resolveProps = require('./resolve-props.js'); -var utilFlowIndentCheck = require('./util-flow-indent-check.js'); - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) { - const NodeClass = tag?.nodeClass ?? YAMLSeq.YAMLSeq; - const seq = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - if (ctx.atKey) - ctx.atKey = false; - let offset = bs.offset; - let commentEnd = null; - for (const { start, value } of bs.items) { - const props = resolveProps.resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - parentIndent: bs.indent, - startOnNewline: true - }); - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - commentEnd = props.end; - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, start, null, props, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, commentEnd ?? 
offset]; - return seq; -} - -exports.resolveBlockSeq = resolveBlockSeq; diff --git a/node_modules/yaml/dist/compose/resolve-end.d.ts b/node_modules/yaml/dist/compose/resolve-end.d.ts deleted file mode 100644 index bb2d0b8..0000000 --- a/node_modules/yaml/dist/compose/resolve-end.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { SourceToken } from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveEnd(end: SourceToken[] | undefined, offset: number, reqSpace: boolean, onError: ComposeErrorHandler): { - comment: string; - offset: number; -}; diff --git a/node_modules/yaml/dist/compose/resolve-end.js b/node_modules/yaml/dist/compose/resolve-end.js deleted file mode 100644 index 3a58347..0000000 --- a/node_modules/yaml/dist/compose/resolve-end.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -exports.resolveEnd = resolveEnd; diff --git a/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts b/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts deleted file mode 100644 index 8db5985..0000000 --- a/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { YAMLMap } from '../nodes/YAMLMap.js'; -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { FlowCollection } from '../parse/cst.js'; -import { CollectionTag } from '../schema/types.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveFlowCollection({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed | YAMLSeq.Parsed; diff --git a/node_modules/yaml/dist/compose/resolve-flow-collection.js b/node_modules/yaml/dist/compose/resolve-flow-collection.js deleted file mode 100644 index 10a7506..0000000 --- a/node_modules/yaml/dist/compose/resolve-flow-collection.js +++ /dev/null @@ -1,209 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Pair = require('../nodes/Pair.js'); -var YAMLMap = require('../nodes/YAMLMap.js'); -var YAMLSeq = require('../nodes/YAMLSeq.js'); -var resolveEnd = require('./resolve-end.js'); -var resolveProps = require('./resolve-props.js'); -var utilContainsNewline = require('./util-contains-newline.js'); -var utilMapIncludes = require('./util-map-includes.js'); - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) { - const isMap = fc.start.source === '{'; - const fcName = isMap ? 
'flow map' : 'flow sequence'; - const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap.YAMLMap : YAMLSeq.YAMLSeq)); - const coll = new NodeClass(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - if (ctx.atKey) - ctx.atKey = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps.resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - parentIndent: fc.indent, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (identity.isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - ctx.atKey = true; - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - ctx.atKey = false; - // value properties - const valueProps = resolveProps.resolveProps(sep ?? 
[], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - parentIndent: fc.indent, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap.YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - const endRange = (valueNode ?? keyNode).range; - map.range = [keyNode.range[0], endRange[1], endRange[2]]; - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -exports.resolveFlowCollection = resolveFlowCollection; diff --git a/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts b/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts deleted file mode 100644 index 0c9204d..0000000 --- a/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Range } from '../nodes/Node.js'; -import { Scalar } from '../nodes/Scalar.js'; -import type { FlowScalar } from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveFlowScalar(scalar: FlowScalar, strict: boolean, onError: ComposeErrorHandler): { - value: string; - type: Scalar.PLAIN | Scalar.QUOTE_DOUBLE | Scalar.QUOTE_SINGLE | null; - comment: string; - range: Range; -}; diff --git a/node_modules/yaml/dist/compose/resolve-flow-scalar.js b/node_modules/yaml/dist/compose/resolve-flow-scalar.js deleted file mode 100644 index 45aad99..0000000 --- a/node_modules/yaml/dist/compose/resolve-flow-scalar.js +++ /dev/null @@ -1,225 +0,0 @@ -'use strict'; - -var Scalar = require('../nodes/Scalar.js'); -var resolveEnd = require('./resolve-end.js'); - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case '>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) { - /** - * The negative 
lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. - * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', // null character - a: '\x07', // bell character - b: '\b', // backspace - e: '\x1b', // escape character - f: '\f', // form feed - n: '\n', // line feed - r: '\r', // carriage return - t: '\t', // horizontal tab - v: '\v', // vertical tab - N: '\u0085', // Unicode next line - _: '\u00a0', // Unicode non-breaking space - L: '\u2028', // Unicode line separator - P: '\u2029', // Unicode paragraph separator - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -exports.resolveFlowScalar = resolveFlowScalar; diff --git a/node_modules/yaml/dist/compose/resolve-props.d.ts b/node_modules/yaml/dist/compose/resolve-props.d.ts deleted file mode 100644 index ea3d828..0000000 --- a/node_modules/yaml/dist/compose/resolve-props.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { SourceToken, Token } from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export interface ResolvePropsArg { - flow?: 'flow map' | 'flow sequence'; - indicator: 'doc-start' | 'explicit-key-ind' | 'map-value-ind' | 'seq-item-ind'; - next: Token | null | undefined; - offset: number; - onError: ComposeErrorHandler; - parentIndent: number; - startOnNewline: boolean; -} -export declare function resolveProps(tokens: SourceToken[], { flow, indicator, next, offset, onError, parentIndent, startOnNewline }: ResolvePropsArg): { - comma: SourceToken | null; - found: SourceToken | null; - spaceBefore: boolean; - comment: string; - hasNewline: boolean; - anchor: SourceToken | null; - tag: SourceToken | null; - newlineAfterProp: SourceToken | null; - end: number; - start: number; -}; diff --git a/node_modules/yaml/dist/compose/resolve-props.js b/node_modules/yaml/dist/compose/resolve-props.js deleted file mode 100644 index 9b63cfd..0000000 --- a/node_modules/yaml/dist/compose/resolve-props.js +++ /dev/null @@ -1,150 +0,0 @@ -'use strict'; - -function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - 
let hasNewline = false; - let reqSpace = false; - let tab = null; - let anchor = null; - let tag = null; - let newlineAfterProp = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - if (tab) { - if (atNewline && token.type !== 'comment' && token.type !== 'newline') { - onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - } - tab = null; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. - // In a flow collection, only the parser handles indent. - if (!flow && - (indicator !== 'doc-start' || next?.type !== 'flow-collection') && - token.source.includes('\t')) { - tab = token; - } - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - newlineAfterProp = token; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); - found = token; - atNewline = - indicator === 'seq-item-ind' || indicator === 'explicit-key-ind'; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? 
last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) { - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - } - if (tab && - ((atNewline && tab.indent <= parentIndent) || - next?.type === 'block-map' || - next?.type === 'block-seq')) - onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - anchor, - tag, - newlineAfterProp, - end, - start: start ?? end - }; -} - -exports.resolveProps = resolveProps; diff --git a/node_modules/yaml/dist/compose/util-contains-newline.d.ts b/node_modules/yaml/dist/compose/util-contains-newline.d.ts deleted file mode 100644 index 8155be0..0000000 --- a/node_modules/yaml/dist/compose/util-contains-newline.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Token } from '../parse/cst.js'; -export declare function containsNewline(key: Token | null | undefined): boolean | null; diff --git a/node_modules/yaml/dist/compose/util-contains-newline.js b/node_modules/yaml/dist/compose/util-contains-newline.js deleted file mode 100644 index e7aa82d..0000000 --- a/node_modules/yaml/dist/compose/util-contains-newline.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict'; - -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -exports.containsNewline = containsNewline; diff --git a/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts b/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts deleted file mode 100644 index 90499b8..0000000 --- a/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Token } from '../parse/cst.js'; -export declare function emptyScalarPosition(offset: number, before: Token[] | undefined, pos: number | null): number; diff --git a/node_modules/yaml/dist/compose/util-empty-scalar-position.js b/node_modules/yaml/dist/compose/util-empty-scalar-position.js deleted file mode 100644 index b2cd849..0000000 --- a/node_modules/yaml/dist/compose/util-empty-scalar-position.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict'; - -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. 
- st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -exports.emptyScalarPosition = emptyScalarPosition; diff --git a/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts b/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts deleted file mode 100644 index 64ed1fc..0000000 --- a/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { Token } from '../parse/cst'; -import { ComposeErrorHandler } from './composer'; -export declare function flowIndentCheck(indent: number, fc: Token | null | undefined, onError: ComposeErrorHandler): void; diff --git a/node_modules/yaml/dist/compose/util-flow-indent-check.js b/node_modules/yaml/dist/compose/util-flow-indent-check.js deleted file mode 100644 index 1e6b06f..0000000 --- a/node_modules/yaml/dist/compose/util-flow-indent-check.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict'; - -var utilContainsNewline = require('./util-contains-newline.js'); - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - utilContainsNewline.containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -exports.flowIndentCheck = flowIndentCheck; diff --git a/node_modules/yaml/dist/compose/util-map-includes.d.ts b/node_modules/yaml/dist/compose/util-map-includes.d.ts deleted file mode 100644 index fae2276..0000000 --- a/node_modules/yaml/dist/compose/util-map-includes.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ParsedNode } from '../nodes/Node.js'; -import type { Pair } from '../nodes/Pair.js'; -import type { ComposeContext } from './compose-node.js'; -export declare function mapIncludes(ctx: ComposeContext, items: Pair[], search: ParsedNode): boolean; diff --git a/node_modules/yaml/dist/compose/util-map-includes.js b/node_modules/yaml/dist/compose/util-map-includes.js deleted file mode 100644 index ebd7a2d..0000000 --- a/node_modules/yaml/dist/compose/util-map-includes.js +++ /dev/null @@ -1,15 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? 
uniqueKeys - : (a, b) => a === b || (identity.isScalar(a) && identity.isScalar(b) && a.value === b.value); - return items.some(pair => isEqual(pair.key, search)); -} - -exports.mapIncludes = mapIncludes; diff --git a/node_modules/yaml/dist/doc/Document.d.ts b/node_modules/yaml/dist/doc/Document.d.ts deleted file mode 100644 index 431b907..0000000 --- a/node_modules/yaml/dist/doc/Document.d.ts +++ /dev/null @@ -1,141 +0,0 @@ -import type { YAMLError, YAMLWarning } from '../errors.js'; -import { Alias } from '../nodes/Alias.js'; -import { NODE_TYPE } from '../nodes/identity.js'; -import type { Node, NodeType, ParsedNode, Range } from '../nodes/Node.js'; -import { Pair } from '../nodes/Pair.js'; -import type { Scalar } from '../nodes/Scalar.js'; -import type { YAMLMap } from '../nodes/YAMLMap.js'; -import type { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from '../options.js'; -import { Schema } from '../schema/Schema.js'; -import { Directives } from './directives.js'; -export type Replacer = any[] | ((key: any, value: any) => unknown); -export declare namespace Document { - /** @ts-ignore The typing of directives fails in TS <= 4.2 */ - interface Parsed extends Document { - directives: Directives; - range: Range; - } -} -export declare class Document { - readonly [NODE_TYPE]: symbol; - /** A comment before this Document */ - commentBefore: string | null; - /** A comment immediately after this Document */ - comment: string | null; - /** The document contents. */ - contents: Strict extends true ? Contents | null : Contents; - directives: Strict extends true ? Directives | undefined : Directives; - /** Errors encountered during parsing. */ - errors: YAMLError[]; - options: Required>; - /** - * The `[start, value-end, node-end]` character offsets for the part of the - * source parsed into this document (undefined if not parsed). The `value-end` - * and `node-end` positions are themselves not included in their respective - * ranges. - */ - range?: Range; - /** The schema used with the document. Use `setSchema()` to change. */ - schema: Schema; - /** Warnings encountered during parsing. */ - warnings: YAMLWarning[]; - /** - * @param value - The initial value for the document, which will be wrapped - * in a Node container. - */ - constructor(value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions); - constructor(value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions); - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone(): Document; - /** Adds a value to the document. */ - add(value: any): void; - /** Adds a value to the document. */ - addIn(path: Iterable, value: unknown): void; - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. - * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node: Strict extends true ? 
Scalar | YAMLMap | YAMLSeq : Node, name?: string): Alias; - /** - * Convert any value into a `Node` using the current schema, recursively - * turning objects into collections. - */ - createNode(value: T, options?: CreateNodeOptions): NodeType; - createNode(value: T, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions): NodeType; - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key: unknown, value: unknown, options?: CreateNodeOptions): Pair; - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key: unknown): boolean; - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path: Iterable | null): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key: unknown, keepScalar?: boolean): Strict extends true ? unknown : any; - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path: Iterable | null, keepScalar?: boolean): Strict extends true ? unknown : any; - /** - * Checks if the document includes a value with the key `key`. - */ - has(key: unknown): boolean; - /** - * Checks if the document includes a value at `path`. - */ - hasIn(path: Iterable | null): boolean; - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key: any, value: unknown): void; - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path: Iterable | null, value: unknown): void; - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version: '1.1' | '1.2' | 'next' | null, options?: SchemaOptions): void; - /** A plain JavaScript representation of the document `contents`. */ - toJS(opt?: ToJSOptions & { - [ignored: string]: unknown; - }): any; - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any; - /** A YAML representation of the document. 
*/ - toString(options?: ToStringOptions): string; -} diff --git a/node_modules/yaml/dist/doc/Document.js b/node_modules/yaml/dist/doc/Document.js deleted file mode 100644 index a953088..0000000 --- a/node_modules/yaml/dist/doc/Document.js +++ /dev/null @@ -1,337 +0,0 @@ -'use strict'; - -var Alias = require('../nodes/Alias.js'); -var Collection = require('../nodes/Collection.js'); -var identity = require('../nodes/identity.js'); -var Pair = require('../nodes/Pair.js'); -var toJS = require('../nodes/toJS.js'); -var Schema = require('../schema/Schema.js'); -var stringifyDocument = require('../stringify/stringifyDocument.js'); -var anchors = require('./anchors.js'); -var applyReviver = require('./applyReviver.js'); -var createNode = require('./createNode.js'); -var directives = require('./directives.js'); - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. */ - this.warnings = []; - Object.defineProperty(this, identity.NODE_TYPE, { value: identity.DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - stringKeys: false, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new directives.Directives({ version }); - this.setSchema(version, options); - // @ts-expect-error We can't really know that this matches Contents. - this.contents = - value === undefined ? null : this.createNode(value, _replacer, options); - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [identity.NODE_TYPE]: { value: identity.DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - // @ts-expect-error We can't really know that this matches Contents. - copy.contents = identity.isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. */ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. 
- * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. - * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchors.anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; - } - return new Alias.Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; - const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode.createNode(value, tag, ctx); - if (flow && identity.isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair.Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (Collection.isEmptyPath(path)) { - if (this.contents == null) - return false; - // @ts-expect-error Presumed impossible if Strict extends false - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return identity.isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - if (Collection.isEmptyPath(path)) - return !keepScalar && identity.isScalar(this.contents) - ? this.contents.value - : this.contents; - return identity.isCollection(this.contents) - ? 
this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return identity.isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. - */ - hasIn(path) { - if (Collection.isEmptyPath(path)) - return this.contents !== undefined; - return identity.isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = Collection.collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (Collection.isEmptyPath(path)) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = value; - } - else if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new directives.Directives({ version: '1.1' }); - opt = { resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new directives.Directives({ version }); - opt = { resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema.Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? 
applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. */ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument.stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (identity.isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -exports.Document = Document; diff --git a/node_modules/yaml/dist/doc/anchors.d.ts b/node_modules/yaml/dist/doc/anchors.d.ts deleted file mode 100644 index f5e967c..0000000 --- a/node_modules/yaml/dist/doc/anchors.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type { Node } from '../nodes/Node.js'; -import type { Document } from './Document.js'; -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -export declare function anchorIsValid(anchor: string): true; -export declare function anchorNames(root: Document | Node): Set; -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -export declare function findNewAnchor(prefix: string, exclude: Set): string; -export declare function createNodeAnchors(doc: Document, prefix: string): { - onAnchor: (source: unknown) => string; - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => void; - sourceObjects: Map; -}; diff --git a/node_modules/yaml/dist/doc/anchors.js b/node_modules/yaml/dist/doc/anchors.js deleted file mode 100644 index 223639a..0000000 --- a/node_modules/yaml/dist/doc/anchors.js +++ /dev/null @@ -1,77 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var visit = require('../visit.js'); - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit.visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. 
*/ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (identity.isScalar(ref.node) || identity.isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -exports.anchorIsValid = anchorIsValid; -exports.anchorNames = anchorNames; -exports.createNodeAnchors = createNodeAnchors; -exports.findNewAnchor = findNewAnchor; diff --git a/node_modules/yaml/dist/doc/applyReviver.d.ts b/node_modules/yaml/dist/doc/applyReviver.d.ts deleted file mode 100644 index e125b08..0000000 --- a/node_modules/yaml/dist/doc/applyReviver.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -export type Reviver = (key: unknown, value: unknown) => unknown; -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. - */ -export declare function applyReviver(reviver: Reviver, obj: unknown, key: unknown, val: any): unknown; diff --git a/node_modules/yaml/dist/doc/applyReviver.js b/node_modules/yaml/dist/doc/applyReviver.js deleted file mode 100644 index bfd0ba8..0000000 --- a/node_modules/yaml/dist/doc/applyReviver.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict'; - -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. 
- */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - // eslint-disable-next-line @typescript-eslint/no-array-delete - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -exports.applyReviver = applyReviver; diff --git a/node_modules/yaml/dist/doc/createNode.d.ts b/node_modules/yaml/dist/doc/createNode.d.ts deleted file mode 100644 index 1619981..0000000 --- a/node_modules/yaml/dist/doc/createNode.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { Node } from '../nodes/Node.js'; -import type { Schema } from '../schema/Schema.js'; -import type { CollectionTag, ScalarTag } from '../schema/types.js'; -import type { Replacer } from './Document.js'; -export interface CreateNodeContext { - aliasDuplicateObjects: boolean; - keepUndefined: boolean; - onAnchor: (source: unknown) => string; - onTagObj?: (tagObj: ScalarTag | CollectionTag) => void; - sourceObjects: Map; - replacer?: Replacer; - schema: Schema; -} -export declare function createNode(value: unknown, tagName: string | undefined, ctx: CreateNodeContext): Node; diff --git a/node_modules/yaml/dist/doc/createNode.js b/node_modules/yaml/dist/doc/createNode.js deleted file mode 100644 index 64dd31a..0000000 --- a/node_modules/yaml/dist/doc/createNode.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict'; - -var Alias = require('../nodes/Alias.js'); -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (identity.isDocument(value)) - value = value.contents; - if (identity.isNode(value)) - return value; - if (identity.isPair(value)) { - const map = ctx.schema[identity.MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. 
- let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias.Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar.Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[identity.MAP] - : Symbol.iterator in Object(value) - ? schema[identity.SEQ] - : schema[identity.MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? tagObj.createNode(ctx.schema, value, ctx) - : typeof tagObj?.nodeClass?.from === 'function' - ? tagObj.nodeClass.from(ctx.schema, value, ctx) - : new Scalar.Scalar(value); - if (tagName) - node.tag = tagName; - else if (!tagObj.default) - node.tag = tagObj.tag; - if (ref) - ref.node = node; - return node; -} - -exports.createNode = createNode; diff --git a/node_modules/yaml/dist/doc/directives.d.ts b/node_modules/yaml/dist/doc/directives.d.ts deleted file mode 100644 index ff8a2cb..0000000 --- a/node_modules/yaml/dist/doc/directives.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { Document } from './Document.js'; -export declare class Directives { - static defaultYaml: Directives['yaml']; - static defaultTags: Directives['tags']; - yaml: { - version: '1.1' | '1.2' | 'next'; - explicit?: boolean; - }; - tags: Record; - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - docStart: true | null; - /** The doc-end marker `...`. */ - docEnd: boolean; - /** - * Used when parsing YAML 1.1, where: - * > If the document specifies no directives, it is parsed using the same - * > settings as the previous document. If the document does specify any - * > directives, all directives of previous documents, if any, are ignored. - */ - private atNextDocument?; - constructor(yaml?: Directives['yaml'], tags?: Directives['tags']); - clone(): Directives; - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument(): Directives; - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line: string, onError: (offset: number, message: string, warning?: boolean) => void): boolean; - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source: string, onError: (message: string) => void): string | null; - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. 
- */ - tagString(tag: string): string; - toString(doc?: Document): string; -} diff --git a/node_modules/yaml/dist/doc/directives.js b/node_modules/yaml/dist/doc/directives.js deleted file mode 100644 index e13b10e..0000000 --- a/node_modules/yaml/dist/doc/directives.js +++ /dev/null @@ -1,178 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var visit = require('../visit.js'); - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' 
|| verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) { - try { - return prefix + decodeURIComponent(suffix); - } - catch (error) { - onError(String(error)); - return null; - } - } - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && identity.isNode(doc.contents)) { - const tags = {}; - visit.visit(doc.contents, (_key, node) => { - if (identity.isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' && prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -exports.Directives = Directives; diff --git a/node_modules/yaml/dist/errors.d.ts b/node_modules/yaml/dist/errors.d.ts deleted file mode 100644 index e5ee857..0000000 --- a/node_modules/yaml/dist/errors.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { LineCounter } from './parse/line-counter'; -export type ErrorCode = 'ALIAS_PROPS' | 'BAD_ALIAS' | 'BAD_DIRECTIVE' | 'BAD_DQ_ESCAPE' | 'BAD_INDENT' | 'BAD_PROP_ORDER' | 'BAD_SCALAR_START' | 'BLOCK_AS_IMPLICIT_KEY' | 'BLOCK_IN_FLOW' | 'DUPLICATE_KEY' | 'IMPOSSIBLE' | 'KEY_OVER_1024_CHARS' | 'MISSING_CHAR' | 'MULTILINE_IMPLICIT_KEY' | 'MULTIPLE_ANCHORS' | 'MULTIPLE_DOCS' | 'MULTIPLE_TAGS' | 'NON_STRING_KEY' | 'TAB_AS_INDENT' | 'TAG_RESOLVE_FAILED' | 'UNEXPECTED_TOKEN' | 'BAD_COLLECTION_TYPE'; -export type LinePos = { - line: number; - col: number; -}; -export declare class YAMLError extends Error { - name: 'YAMLParseError' | 'YAMLWarning'; - code: ErrorCode; - message: string; - pos: [number, number]; - linePos?: [LinePos] | [LinePos, LinePos]; - constructor(name: YAMLError['name'], pos: [number, number], code: ErrorCode, message: string); -} -export declare class YAMLParseError extends YAMLError { - constructor(pos: [number, number], code: ErrorCode, message: string); -} -export declare class YAMLWarning extends YAMLError { - constructor(pos: [number, number], code: ErrorCode, message: string); -} -export declare const prettifyError: (src: string, lc: LineCounter) => (error: YAMLError) => void; diff --git a/node_modules/yaml/dist/errors.js b/node_modules/yaml/dist/errors.js deleted file mode 100644 index 9d04c60..0000000 --- a/node_modules/yaml/dist/errors.js +++ /dev/null @@ -1,62 +0,0 @@ -'use strict'; - -class YAMLError extends Error { - constructor(name, 
pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.max(1, Math.min(end.col - col, 80 - ci)); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -exports.YAMLError = YAMLError; -exports.YAMLParseError = YAMLParseError; -exports.YAMLWarning = YAMLWarning; -exports.prettifyError = prettifyError; diff --git a/node_modules/yaml/dist/index.d.ts b/node_modules/yaml/dist/index.d.ts deleted file mode 100644 index 66e784e..0000000 --- a/node_modules/yaml/dist/index.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -export { Composer } from './compose/composer.js'; -export { Document } from './doc/Document.js'; -export { Schema } from './schema/Schema.js'; -export { ErrorCode, YAMLError, YAMLParseError, YAMLWarning } from './errors.js'; -export { Alias } from './nodes/Alias.js'; -export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/identity.js'; -export { Node, ParsedNode, Range } from './nodes/Node.js'; -export { Pair } from './nodes/Pair.js'; -export { Scalar } from './nodes/Scalar.js'; -export { YAMLMap } from './nodes/YAMLMap.js'; -export { YAMLSeq } from './nodes/YAMLSeq.js'; -export type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js'; -export * as CST from './parse/cst.js'; -export { Lexer } from './parse/lexer.js'; -export { LineCounter } from './parse/line-counter.js'; -export { Parser } from './parse/parser.js'; -export { EmptyStream, parse, parseAllDocuments, parseDocument, stringify } from './public-api.js'; -export type { TagId, Tags } from './schema/tags'; -export type { CollectionTag, ScalarTag } from './schema/types'; -export type { YAMLOMap } from './schema/yaml-1.1/omap'; -export type { YAMLSet } from './schema/yaml-1.1/set'; -export { asyncVisitor, asyncVisitorFn, visit, visitAsync, visitor, visitorFn } from './visit.js'; diff --git a/node_modules/yaml/dist/index.js b/node_modules/yaml/dist/index.js deleted file mode 100644 index 18c0cb6..0000000 
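A minimal sketch of the public API and error types removed above (index.d.ts / errors.js): this assumes the yaml v2 package that was vendored here, and the commented results are approximate, not part of the patch.

import { parse, stringify, parseDocument } from 'yaml'

const data = parse('pi: 3.14\nlist: [1, 2]')   // { pi: 3.14, list: [1, 2] }
const text = stringify(data)                    // block-style YAML, 2-space indent by default

const broken = parseDocument('{ a: 1\n')        // unterminated flow map
for (const err of broken.errors) {
  // err is a YAMLParseError as declared above: code, pos, and linePos when prettyErrors is on
  console.log(err.code, err.linePos?.[0], err.message)
}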
--- a/node_modules/yaml/dist/index.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -var composer = require('./compose/composer.js'); -var Document = require('./doc/Document.js'); -var Schema = require('./schema/Schema.js'); -var errors = require('./errors.js'); -var Alias = require('./nodes/Alias.js'); -var identity = require('./nodes/identity.js'); -var Pair = require('./nodes/Pair.js'); -var Scalar = require('./nodes/Scalar.js'); -var YAMLMap = require('./nodes/YAMLMap.js'); -var YAMLSeq = require('./nodes/YAMLSeq.js'); -var cst = require('./parse/cst.js'); -var lexer = require('./parse/lexer.js'); -var lineCounter = require('./parse/line-counter.js'); -var parser = require('./parse/parser.js'); -var publicApi = require('./public-api.js'); -var visit = require('./visit.js'); - - - -exports.Composer = composer.Composer; -exports.Document = Document.Document; -exports.Schema = Schema.Schema; -exports.YAMLError = errors.YAMLError; -exports.YAMLParseError = errors.YAMLParseError; -exports.YAMLWarning = errors.YAMLWarning; -exports.Alias = Alias.Alias; -exports.isAlias = identity.isAlias; -exports.isCollection = identity.isCollection; -exports.isDocument = identity.isDocument; -exports.isMap = identity.isMap; -exports.isNode = identity.isNode; -exports.isPair = identity.isPair; -exports.isScalar = identity.isScalar; -exports.isSeq = identity.isSeq; -exports.Pair = Pair.Pair; -exports.Scalar = Scalar.Scalar; -exports.YAMLMap = YAMLMap.YAMLMap; -exports.YAMLSeq = YAMLSeq.YAMLSeq; -exports.CST = cst; -exports.Lexer = lexer.Lexer; -exports.LineCounter = lineCounter.LineCounter; -exports.Parser = parser.Parser; -exports.parse = publicApi.parse; -exports.parseAllDocuments = publicApi.parseAllDocuments; -exports.parseDocument = publicApi.parseDocument; -exports.stringify = publicApi.stringify; -exports.visit = visit.visit; -exports.visitAsync = visit.visitAsync; diff --git a/node_modules/yaml/dist/log.d.ts b/node_modules/yaml/dist/log.d.ts deleted file mode 100644 index 5e21612..0000000 --- a/node_modules/yaml/dist/log.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export type LogLevelId = 'silent' | 'error' | 'warn' | 'debug'; -export declare function debug(logLevel: LogLevelId, ...messages: any[]): void; -export declare function warn(logLevel: LogLevelId, warning: string | Error): void; diff --git a/node_modules/yaml/dist/log.js b/node_modules/yaml/dist/log.js deleted file mode 100644 index fac7d5a..0000000 --- a/node_modules/yaml/dist/log.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict'; - -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -exports.debug = debug; -exports.warn = warn; diff --git a/node_modules/yaml/dist/nodes/Alias.d.ts b/node_modules/yaml/dist/nodes/Alias.d.ts deleted file mode 100644 index b55ebae..0000000 --- a/node_modules/yaml/dist/nodes/Alias.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { FlowScalar } from '../parse/cst.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { NodeBase, Range } from './Node.js'; -import type { Scalar } from './Scalar'; -import { ToJSContext } from './toJS.js'; -import type { YAMLMap } from './YAMLMap.js'; -import type { YAMLSeq } from './YAMLSeq.js'; -export declare namespace Alias { - interface Parsed 
extends Alias { - range: Range; - srcToken?: FlowScalar & { - type: 'alias'; - }; - } -} -export declare class Alias extends NodeBase { - source: string; - anchor?: never; - constructor(source: string); - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. - */ - resolve(doc: Document): Scalar | YAMLMap | YAMLSeq | undefined; - toJSON(_arg?: unknown, ctx?: ToJSContext): {} | null; - toString(ctx?: StringifyContext, _onComment?: () => void, _onChompKeep?: () => void): string; -} diff --git a/node_modules/yaml/dist/nodes/Alias.js b/node_modules/yaml/dist/nodes/Alias.js deleted file mode 100644 index 46b37d9..0000000 --- a/node_modules/yaml/dist/nodes/Alias.js +++ /dev/null @@ -1,103 +0,0 @@ -'use strict'; - -var anchors = require('../doc/anchors.js'); -var visit = require('../visit.js'); -var identity = require('./identity.js'); -var Node = require('./Node.js'); -var toJS = require('./toJS.js'); - -class Alias extends Node.NodeBase { - constructor(source) { - super(identity.ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. - */ - resolve(doc) { - let found = undefined; - visit.visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - let data = anchors.get(source); - if (!data) { - // Resolve anchors for Node.prototype.toJS() - toJS.toJS(source, null, ctx); - data = anchors.get(source); - } - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchors.anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (identity.isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? 
anchor.count * anchor.aliasCount : 0; - } - else if (identity.isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (identity.isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -exports.Alias = Alias; diff --git a/node_modules/yaml/dist/nodes/Collection.d.ts b/node_modules/yaml/dist/nodes/Collection.d.ts deleted file mode 100644 index 0b5979d..0000000 --- a/node_modules/yaml/dist/nodes/Collection.d.ts +++ /dev/null @@ -1,73 +0,0 @@ -import type { Schema } from '../schema/Schema.js'; -import { NODE_TYPE } from './identity.js'; -import { NodeBase } from './Node.js'; -export declare function collectionFromPath(schema: Schema, path: unknown[], value: unknown): import("./Node.js").Node; -export declare const isEmptyPath: (path: Iterable | null | undefined) => path is null | undefined; -export declare abstract class Collection extends NodeBase { - schema: Schema | undefined; - [NODE_TYPE]: symbol; - items: unknown[]; - /** An optional anchor on this node. Used by alias nodes. */ - anchor?: string; - /** - * If true, stringify this and all child nodes using flow rather than - * block styles. - */ - flow?: boolean; - constructor(type: symbol, schema?: Schema); - /** - * Create a copy of this collection. - * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema?: Schema): Collection; - /** Adds a value to the collection. */ - abstract add(value: unknown): void; - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - abstract delete(key: unknown): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - abstract get(key: unknown, keepScalar?: boolean): unknown; - /** - * Checks if the collection includes a value with the key `key`. - */ - abstract has(key: unknown): boolean; - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - abstract set(key: unknown, value: unknown): void; - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path: Iterable, value: unknown): void; - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path: Iterable): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path: Iterable, keepScalar?: boolean): unknown; - hasAllNullValues(allowScalar?: boolean): boolean; - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path: Iterable): boolean; - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. 
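The deleted Alias.js resolves `*alias` references against their `&anchor` and enforces `maxAliasCount` when converting to JS. A rough usage sketch, assuming the same yaml v2 API; the commented values are illustrative.

import { parseDocument, isAlias } from 'yaml'

const doc = parseDocument('base: &b { x: 1 }\ncopy: *b\n')
const alias = doc.get('copy', true)      // the Alias node itself
console.log(isAlias(alias))              // true
console.log(alias.resolve(doc))          // the YAMLMap anchored as &b
console.log(doc.toJS())                  // { base: { x: 1 }, copy: { x: 1 } }
try {
  doc.toJS({ maxAliasCount: 0 })         // 0 disallows alias nodes entirely
} catch (e) {
  console.log((e as Error).message)      // 'Excessive alias count ...'
}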
- */ - setIn(path: Iterable, value: unknown): void; -} diff --git a/node_modules/yaml/dist/nodes/Collection.js b/node_modules/yaml/dist/nodes/Collection.js deleted file mode 100644 index bdf8cb4..0000000 --- a/node_modules/yaml/dist/nodes/Collection.js +++ /dev/null @@ -1,151 +0,0 @@ -'use strict'; - -var createNode = require('../doc/createNode.js'); -var identity = require('./identity.js'); -var Node = require('./Node.js'); - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode.createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). -const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends Node.NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. - * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => identity.isNode(it) || identity.isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (identity.isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (identity.isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && identity.isScalar(node) ? node.value : node; - else - return identity.isCollection(node) ? 
node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!identity.isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - identity.isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return identity.isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (identity.isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } -} - -exports.Collection = Collection; -exports.collectionFromPath = collectionFromPath; -exports.isEmptyPath = isEmptyPath; diff --git a/node_modules/yaml/dist/nodes/Node.d.ts b/node_modules/yaml/dist/nodes/Node.d.ts deleted file mode 100644 index 50564c8..0000000 --- a/node_modules/yaml/dist/nodes/Node.d.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { ToJSOptions } from '../options.js'; -import { Token } from '../parse/cst.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import type { Alias } from './Alias.js'; -import { NODE_TYPE } from './identity.js'; -import type { Scalar } from './Scalar.js'; -import { ToJSContext } from './toJS.js'; -import type { MapLike, YAMLMap } from './YAMLMap.js'; -import type { YAMLSeq } from './YAMLSeq.js'; -export type Node = Alias | Scalar | YAMLMap | YAMLSeq; -/** Utility type mapper */ -export type NodeType = T extends string | number | bigint | boolean | null | undefined ? Scalar : T extends Date ? Scalar : T extends Array ? YAMLSeq> : T extends { - [key: string]: any; -} ? YAMLMap, NodeType> : T extends { - [key: number]: any; -} ? YAMLMap, NodeType> : Node; -export type ParsedNode = Alias.Parsed | Scalar.Parsed | YAMLMap.Parsed | YAMLSeq.Parsed; -/** `[start, value-end, node-end]` */ -export type Range = [number, number, number]; -export declare abstract class NodeBase { - readonly [NODE_TYPE]: symbol; - /** A comment on or immediately after this */ - comment?: string | null; - /** A comment before this */ - commentBefore?: string | null; - /** - * The `[start, value-end, node-end]` character offsets for the part of the - * source parsed into this node (undefined if not parsed). The `value-end` - * and `node-end` positions are themselves not included in their respective - * ranges. - */ - range?: Range | null; - /** A blank line before this node and its commentBefore */ - spaceBefore?: boolean; - /** The CST token that was composed into this node. */ - srcToken?: Token; - /** A fully qualified tag, if required */ - tag?: string; - /** - * Customize the way that a key-value pair is resolved. - * Used for YAML 1.1 !!merge << handling. 
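The path-based accessors defined in the deleted Collection.js (addIn / deleteIn / getIn / setIn) are also exposed as Document wrappers; a small illustrative sketch of how they behave, with approximate output in the comments.

import { parseDocument } from 'yaml'

const doc = parseDocument('a:\n  b: [1, 2]\n')
doc.getIn(['a', 'b', 0])        // 1 (scalar values are unwrapped by default)
doc.setIn(['a', 'b', 2], 3)     // extends the sequence
doc.setIn(['a', 'c', 'd'], 4)   // missing collections are created from the path
doc.deleteIn(['a', 'b', 1])     // true
console.log(doc.toString())
// a:
//   b: [ 1, 3 ]
//   c:
//     d: 4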
- */ - addToJSMap?: (ctx: ToJSContext | undefined, map: MapLike, value: unknown) => void; - /** A plain JS representation of this node */ - abstract toJSON(): any; - abstract toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; - constructor(type: symbol); - /** Create a copy of this node. */ - clone(): NodeBase; - /** A plain JavaScript representation of this node. */ - toJS(doc: Document, { mapAsMap, maxAliasCount, onAnchor, reviver }?: ToJSOptions): any; -} diff --git a/node_modules/yaml/dist/nodes/Node.js b/node_modules/yaml/dist/nodes/Node.js deleted file mode 100644 index d384e1c..0000000 --- a/node_modules/yaml/dist/nodes/Node.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict'; - -var applyReviver = require('../doc/applyReviver.js'); -var identity = require('./identity.js'); -var toJS = require('./toJS.js'); - -class NodeBase { - constructor(type) { - Object.defineProperty(this, identity.NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** A plain JavaScript representation of this node. */ - toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - if (!identity.isDocument(doc)) - throw new TypeError('A document argument is required'); - const ctx = { - anchors: new Map(), - doc, - keep: true, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS.toJS(this, '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } -} - -exports.NodeBase = NodeBase; diff --git a/node_modules/yaml/dist/nodes/Pair.d.ts b/node_modules/yaml/dist/nodes/Pair.d.ts deleted file mode 100644 index 6178d3a..0000000 --- a/node_modules/yaml/dist/nodes/Pair.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { CreateNodeContext } from '../doc/createNode.js'; -import type { CollectionItem } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { addPairToJSMap } from './addPairToJSMap.js'; -import { NODE_TYPE } from './identity.js'; -import type { ToJSContext } from './toJS.js'; -export declare function createPair(key: unknown, value: unknown, ctx: CreateNodeContext): Pair | import("./YAMLMap.js").YAMLMap | import("./YAMLSeq.js").YAMLSeq>; -export declare class Pair { - readonly [NODE_TYPE]: symbol; - /** Always Node or null when parsed, but can be set to anything. */ - key: K; - /** Always Node or null when parsed, but can be set to anything. */ - value: V | null; - /** The CST token that was composed into this pair. 
*/ - srcToken?: CollectionItem; - constructor(key: K, value?: V | null); - clone(schema?: Schema): Pair; - toJSON(_?: unknown, ctx?: ToJSContext): ReturnType; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; -} diff --git a/node_modules/yaml/dist/nodes/Pair.js b/node_modules/yaml/dist/nodes/Pair.js deleted file mode 100644 index ae4c772..0000000 --- a/node_modules/yaml/dist/nodes/Pair.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -var createNode = require('../doc/createNode.js'); -var stringifyPair = require('../stringify/stringifyPair.js'); -var addPairToJSMap = require('./addPairToJSMap.js'); -var identity = require('./identity.js'); - -function createPair(key, value, ctx) { - const k = createNode.createNode(key, undefined, ctx); - const v = createNode.createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, identity.NODE_TYPE, { value: identity.PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (identity.isNode(key)) - key = key.clone(schema); - if (identity.isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap.addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -exports.Pair = Pair; -exports.createPair = createPair; diff --git a/node_modules/yaml/dist/nodes/Scalar.d.ts b/node_modules/yaml/dist/nodes/Scalar.d.ts deleted file mode 100644 index dd330b2..0000000 --- a/node_modules/yaml/dist/nodes/Scalar.d.ts +++ /dev/null @@ -1,42 +0,0 @@ -import type { BlockScalar, FlowScalar } from '../parse/cst.js'; -import { NodeBase, Range } from './Node.js'; -import { ToJSContext } from './toJS.js'; -export declare const isScalarValue: (value: unknown) => boolean; -export declare namespace Scalar { - interface Parsed extends Scalar { - range: Range; - source: string; - srcToken?: FlowScalar | BlockScalar; - } - type BLOCK_FOLDED = 'BLOCK_FOLDED'; - type BLOCK_LITERAL = 'BLOCK_LITERAL'; - type PLAIN = 'PLAIN'; - type QUOTE_DOUBLE = 'QUOTE_DOUBLE'; - type QUOTE_SINGLE = 'QUOTE_SINGLE'; - type Type = BLOCK_FOLDED | BLOCK_LITERAL | PLAIN | QUOTE_DOUBLE | QUOTE_SINGLE; -} -export declare class Scalar extends NodeBase { - static readonly BLOCK_FOLDED = "BLOCK_FOLDED"; - static readonly BLOCK_LITERAL = "BLOCK_LITERAL"; - static readonly PLAIN = "PLAIN"; - static readonly QUOTE_DOUBLE = "QUOTE_DOUBLE"; - static readonly QUOTE_SINGLE = "QUOTE_SINGLE"; - value: T; - /** An optional anchor on this node. Used by alias nodes. */ - anchor?: string; - /** - * By default (undefined), numbers use decimal notation. - * The YAML 1.2 core schema only supports 'HEX' and 'OCT'. - * The YAML 1.1 schema also supports 'BIN' and 'TIME' - */ - format?: string; - /** If `value` is a number, use this value when stringifying this node. 
*/ - minFractionDigits?: number; - /** Set during parsing to the source string value */ - source?: string; - /** The scalar style used for the node's string representation */ - type?: Scalar.Type; - constructor(value: T); - toJSON(arg?: any, ctx?: ToJSContext): any; - toString(): string; -} diff --git a/node_modules/yaml/dist/nodes/Scalar.js b/node_modules/yaml/dist/nodes/Scalar.js deleted file mode 100644 index bd7d4d2..0000000 --- a/node_modules/yaml/dist/nodes/Scalar.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict'; - -var identity = require('./identity.js'); -var Node = require('./Node.js'); -var toJS = require('./toJS.js'); - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends Node.NodeBase { - constructor(value) { - super(identity.SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -exports.Scalar = Scalar; -exports.isScalarValue = isScalarValue; diff --git a/node_modules/yaml/dist/nodes/YAMLMap.d.ts b/node_modules/yaml/dist/nodes/YAMLMap.d.ts deleted file mode 100644 index cef75f0..0000000 --- a/node_modules/yaml/dist/nodes/YAMLMap.d.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type { BlockMap, FlowCollection } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { CreateNodeContext } from '../util.js'; -import { Collection } from './Collection.js'; -import type { ParsedNode, Range } from './Node.js'; -import { Pair } from './Pair.js'; -import { Scalar } from './Scalar.js'; -import type { ToJSContext } from './toJS.js'; -export type MapLike = Map | Set | Record; -export declare function findPair(items: Iterable>, key: unknown): Pair | undefined; -export declare namespace YAMLMap { - interface Parsed extends YAMLMap { - items: Pair[]; - range: Range; - srcToken?: BlockMap | FlowCollection; - } -} -export declare class YAMLMap extends Collection { - static get tagName(): 'tag:yaml.org,2002:map'; - items: Pair[]; - constructor(schema?: Schema); - /** - * A generic collection parsing method that can be extended - * to other node classes that inherit from YAMLMap - */ - static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLMap; - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. 
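Scalar nodes (deleted Scalar.js / Node.js above) keep parse-time details such as `type` and `format` and reuse them when stringifying. A hedged sketch; the exact output depends on the schema, here assumed to be the default core schema.

import { parseDocument, Scalar } from 'yaml'

const doc = parseDocument('count: 0x2A\nname: "hi"\n')
const count = doc.get('count', true) as Scalar      // keepScalar: true -> the node
console.log(count.value, count.format)              // 42 'HEX'
console.log((doc.get('name', true) as Scalar).type) // 'QUOTE_DOUBLE'

count.value = 255                                    // format should be kept on output
console.log(doc.toString())                          // count: 0xff
                                                     // name: "hi"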
- */ - add(pair: Pair | { - key: K; - value: V; - }, overwrite?: boolean): void; - delete(key: unknown): boolean; - get(key: unknown, keepScalar: true): Scalar | undefined; - get(key: unknown, keepScalar?: false): V | undefined; - get(key: unknown, keepScalar?: boolean): V | Scalar | undefined; - has(key: unknown): boolean; - set(key: K, value: V): void; - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON>(_?: unknown, ctx?: ToJSContext, Type?: { - new (): T; - }): any; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; -} diff --git a/node_modules/yaml/dist/nodes/YAMLMap.js b/node_modules/yaml/dist/nodes/YAMLMap.js deleted file mode 100644 index 210abbf..0000000 --- a/node_modules/yaml/dist/nodes/YAMLMap.js +++ /dev/null @@ -1,147 +0,0 @@ -'use strict'; - -var stringifyCollection = require('../stringify/stringifyCollection.js'); -var addPairToJSMap = require('./addPairToJSMap.js'); -var Collection = require('./Collection.js'); -var identity = require('./identity.js'); -var Pair = require('./Pair.js'); -var Scalar = require('./Scalar.js'); - -function findPair(items, key) { - const k = identity.isScalar(key) ? key.value : key; - for (const it of items) { - if (identity.isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (identity.isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection.Collection { - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - constructor(schema) { - super(identity.MAP, schema); - this.items = []; - } - /** - * A generic collection parsing method that can be extended - * to other node classes that inherit from YAMLMap - */ - static from(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new this(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(Pair.createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (identity.isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. 
- _pair = new Pair.Pair(pair, pair?.value); - } - else - _pair = new Pair.Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (identity.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && identity.isScalar(node) ? node.value : node) ?? undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair.Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap.addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!identity.isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -exports.YAMLMap = YAMLMap; -exports.findPair = findPair; diff --git a/node_modules/yaml/dist/nodes/YAMLSeq.d.ts b/node_modules/yaml/dist/nodes/YAMLSeq.d.ts deleted file mode 100644 index 47fe24d..0000000 --- a/node_modules/yaml/dist/nodes/YAMLSeq.d.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { CreateNodeContext } from '../doc/createNode.js'; -import type { BlockSequence, FlowCollection } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { Collection } from './Collection.js'; -import type { ParsedNode, Range } from './Node.js'; -import type { Pair } from './Pair.js'; -import { Scalar } from './Scalar.js'; -import { ToJSContext } from './toJS.js'; -export declare namespace YAMLSeq { - interface Parsed = ParsedNode> extends YAMLSeq { - items: T[]; - range: Range; - srcToken?: BlockSequence | FlowCollection; - } -} -export declare class YAMLSeq extends Collection { - static get tagName(): 'tag:yaml.org,2002:seq'; - items: T[]; - constructor(schema?: Schema); - add(value: T): void; - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. 
- */ - delete(key: unknown): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - get(key: unknown, keepScalar: true): Scalar | undefined; - get(key: unknown, keepScalar?: false): T | undefined; - get(key: unknown, keepScalar?: boolean): T | Scalar | undefined; - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key: unknown): boolean; - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. - */ - set(key: unknown, value: T): void; - toJSON(_?: unknown, ctx?: ToJSContext): unknown[]; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; - static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLSeq; -} diff --git a/node_modules/yaml/dist/nodes/YAMLSeq.js b/node_modules/yaml/dist/nodes/YAMLSeq.js deleted file mode 100644 index a2af086..0000000 --- a/node_modules/yaml/dist/nodes/YAMLSeq.js +++ /dev/null @@ -1,115 +0,0 @@ -'use strict'; - -var createNode = require('../doc/createNode.js'); -var stringifyCollection = require('../stringify/stringifyCollection.js'); -var Collection = require('./Collection.js'); -var identity = require('./identity.js'); -var Scalar = require('./Scalar.js'); -var toJS = require('./toJS.js'); - -class YAMLSeq extends Collection.Collection { - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - constructor(schema) { - super(identity.SEQ, schema); - this.items = []; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && identity.isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (identity.isScalar(prev) && Scalar.isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS.toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } - static from(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new this(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode.createNode(it, undefined, ctx)); - } - } - return seq; - } -} -function asItemIndex(key) { - let idx = identity.isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -exports.YAMLSeq = YAMLSeq; diff --git a/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts b/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts deleted file mode 100644 index 70d9e62..0000000 --- a/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { Pair } from './Pair.js'; -import { ToJSContext } from './toJS.js'; -import type { MapLike } from './YAMLMap.js'; -export declare function addPairToJSMap(ctx: ToJSContext | undefined, map: MapLike, { key, value }: Pair): MapLike; diff --git a/node_modules/yaml/dist/nodes/addPairToJSMap.js b/node_modules/yaml/dist/nodes/addPairToJSMap.js deleted file mode 100644 index 755468c..0000000 --- a/node_modules/yaml/dist/nodes/addPairToJSMap.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -var log = require('../log.js'); -var merge = require('../schema/yaml-1.1/merge.js'); -var stringify = require('../stringify/stringify.js'); -var identity = require('./identity.js'); -var toJS = require('./toJS.js'); - -function addPairToJSMap(ctx, map, { key, value }) { - if (identity.isNode(key) && key.addToJSMap) - key.addToJSMap(ctx, map, value); - // TODO: Should drop this special case for bare << handling - else if (merge.isMergeKey(ctx, key)) - merge.addMergeToJSMap(ctx, map, value); - else { - const jsKey = toJS.toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS.toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS.toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (identity.isNode(key) && ctx?.doc) { - const strCtx = stringify.createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow 
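A sketch of the collection node APIs deleted above (YAMLMap.set/get/delete, YAMLSeq with integer keys), operating on a parsed document's contents; again illustrative against the yaml v2 API, with approximate output.

import { parseDocument, isMap, isSeq } from 'yaml'

const doc = parseDocument('users:\n  - eve\n  - bob\n')
const root = doc.contents
if (isMap(root)) {
  const users = root.get('users')     // the YAMLSeq node
  if (isSeq(users)) {
    users.get(0)                      // 'eve' (scalar unwrapped)
    users.set(1, 'mallory')           // replaces index 1
    users.delete(0)                   // true
  }
  root.set('active', true)            // adds a new pair
}
console.log(doc.toString())
// users:
//   - mallory
// active: true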
= true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -exports.addPairToJSMap = addPairToJSMap; diff --git a/node_modules/yaml/dist/nodes/identity.d.ts b/node_modules/yaml/dist/nodes/identity.d.ts deleted file mode 100644 index a576b1a..0000000 --- a/node_modules/yaml/dist/nodes/identity.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Alias } from './Alias.js'; -import type { Node } from './Node.js'; -import type { Pair } from './Pair.js'; -import type { Scalar } from './Scalar.js'; -import type { YAMLMap } from './YAMLMap.js'; -import type { YAMLSeq } from './YAMLSeq.js'; -export declare const ALIAS: unique symbol; -export declare const DOC: unique symbol; -export declare const MAP: unique symbol; -export declare const PAIR: unique symbol; -export declare const SCALAR: unique symbol; -export declare const SEQ: unique symbol; -export declare const NODE_TYPE: unique symbol; -export declare const isAlias: (node: any) => node is Alias; -export declare const isDocument: (node: any) => node is Document; -export declare const isMap: (node: any) => node is YAMLMap; -export declare const isPair: (node: any) => node is Pair; -export declare const isScalar: (node: any) => node is Scalar; -export declare const isSeq: (node: any) => node is YAMLSeq; -export declare function isCollection(node: any): node is YAMLMap | YAMLSeq; -export declare function isNode(node: any): node is Node; -export declare const hasAnchor: (node: unknown) => node is Scalar | YAMLMap | YAMLSeq; diff --git a/node_modules/yaml/dist/nodes/identity.js b/node_modules/yaml/dist/nodes/identity.js deleted file mode 100644 index 5794aa3..0000000 --- a/node_modules/yaml/dist/nodes/identity.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict'; - -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; - -exports.ALIAS = ALIAS; -exports.DOC = DOC; -exports.MAP = MAP; -exports.NODE_TYPE 
= NODE_TYPE; -exports.PAIR = PAIR; -exports.SCALAR = SCALAR; -exports.SEQ = SEQ; -exports.hasAnchor = hasAnchor; -exports.isAlias = isAlias; -exports.isCollection = isCollection; -exports.isDocument = isDocument; -exports.isMap = isMap; -exports.isNode = isNode; -exports.isPair = isPair; -exports.isScalar = isScalar; -exports.isSeq = isSeq; diff --git a/node_modules/yaml/dist/nodes/toJS.d.ts b/node_modules/yaml/dist/nodes/toJS.d.ts deleted file mode 100644 index dcd0642..0000000 --- a/node_modules/yaml/dist/nodes/toJS.d.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Node } from './Node.js'; -export interface AnchorData { - aliasCount: number; - count: number; - res: unknown; -} -export interface ToJSContext { - anchors: Map; - doc: Document; - keep: boolean; - mapAsMap: boolean; - mapKeyWarned: boolean; - maxAliasCount: number; - onCreate?: (res: unknown) => void; -} -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -export declare function toJS(value: any, arg: string | null, ctx?: ToJSContext): any; diff --git a/node_modules/yaml/dist/nodes/toJS.js b/node_modules/yaml/dist/nodes/toJS.js deleted file mode 100644 index a012823..0000000 --- a/node_modules/yaml/dist/nodes/toJS.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -var identity = require('./identity.js'); - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. 
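The identity guards exported above (isMap, isScalar, isNode, ...) are the usual way to narrow node types, for example while visiting a document. A minimal sketch:

import { parseDocument, visit, isScalar, isMap } from 'yaml'

const doc = parseDocument('a: 1\nb: { c: 2 }\n')
console.log(isMap(doc.contents))      // true
visit(doc, {
  Scalar(_key, node) {
    if (isScalar(node) && typeof node.value === 'number')
      node.value = node.value * 10    // a: 10, c: 20
  }
})
console.log(doc.toString())           // a: 10
                                      // b: { c: 20 }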
- */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !identity.hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -exports.toJS = toJS; diff --git a/node_modules/yaml/dist/options.d.ts b/node_modules/yaml/dist/options.d.ts deleted file mode 100644 index fcf9898..0000000 --- a/node_modules/yaml/dist/options.d.ts +++ /dev/null @@ -1,344 +0,0 @@ -import type { Reviver } from './doc/applyReviver.js'; -import type { Directives } from './doc/directives.js'; -import type { LogLevelId } from './log.js'; -import type { ParsedNode } from './nodes/Node.js'; -import type { Pair } from './nodes/Pair.js'; -import type { Scalar } from './nodes/Scalar.js'; -import type { LineCounter } from './parse/line-counter.js'; -import type { Schema } from './schema/Schema.js'; -import type { Tags } from './schema/tags.js'; -import type { CollectionTag, ScalarTag } from './schema/types.js'; -export type ParseOptions = { - /** - * Whether integers should be parsed into BigInt rather than number values. - * - * Default: `false` - * - * https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/BigInt - */ - intAsBigInt?: boolean; - /** - * Include a `srcToken` value on each parsed `Node`, containing the CST token - * that was composed into this node. - * - * Default: `false` - */ - keepSourceTokens?: boolean; - /** - * If set, newlines will be tracked, to allow for `lineCounter.linePos(offset)` - * to provide the `{ line, col }` positions within the input. - */ - lineCounter?: LineCounter; - /** - * Include line/col position & node type directly in parse errors. - * - * Default: `true` - */ - prettyErrors?: boolean; - /** - * Detect and report errors that are required by the YAML 1.2 spec, - * but are caused by unambiguous content. - * - * Default: `true` - */ - strict?: boolean; - /** - * Parse all mapping keys as strings. Treat all non-scalar keys as errors. - * - * Default: `false` - */ - stringKeys?: boolean; - /** - * YAML requires map keys to be unique. By default, this is checked by - * comparing scalar values with `===`; deep equality is not checked for - * aliases or collections. If merge keys are enabled by the schema, - * multiple `<<` keys are allowed. - * - * Set `false` to disable, or provide your own comparator function to - * customise. The comparator will be passed two `ParsedNode` values, and - * is expected to return a `boolean` indicating their equality. - * - * Default: `true` - */ - uniqueKeys?: boolean | ((a: ParsedNode, b: ParsedNode) => boolean); -}; -export type DocumentOptions = { - /** - * @internal - * Used internally by Composer. If set and includes an explicit version, - * that overrides the `version` option. - */ - _directives?: Directives; - /** - * Control the logging level during parsing - * - * Default: `'warn'` - */ - logLevel?: LogLevelId; - /** - * The YAML version used by documents without a `%YAML` directive. 
- * - * Default: `"1.2"` - */ - version?: '1.1' | '1.2' | 'next'; -}; -export type SchemaOptions = { - /** - * When parsing, warn about compatibility issues with the given schema. - * When stringifying, use scalar styles that are parsed correctly - * by the `compat` schema as well as the actual schema. - * - * Default: `null` - */ - compat?: string | Tags | null; - /** - * Array of additional tags to include in the schema, or a function that may - * modify the schema's base tag array. - */ - customTags?: Tags | ((tags: Tags) => Tags) | null; - /** - * Enable support for `<<` merge keys. - * - * Default: `false` for YAML 1.2, `true` for earlier versions - */ - merge?: boolean; - /** - * When using the `'core'` schema, support parsing values with these - * explicit YAML 1.1 tags: - * - * `!!binary`, `!!omap`, `!!pairs`, `!!set`, `!!timestamp`. - * - * Default `true` - */ - resolveKnownTags?: boolean; - /** - * The base schema to use. - * - * The core library has built-in support for the following: - * - `'failsafe'`: A minimal schema that parses all scalars as strings - * - `'core'`: The YAML 1.2 core schema - * - `'json'`: The YAML 1.2 JSON schema, with minimal rules for JSON compatibility - * - `'yaml-1.1'`: The YAML 1.1 schema - * - * If using another (custom) schema, the `customTags` array needs to - * fully define the schema's tags. - * - * Default: `'core'` for YAML 1.2, `'yaml-1.1'` for earlier versions - */ - schema?: string | Schema; - /** - * When adding to or stringifying a map, sort the entries. - * If `true`, sort by comparing key values with `<`. - * Does not affect item order when parsing. - * - * Default: `false` - */ - sortMapEntries?: boolean | ((a: Pair, b: Pair) => number); - /** - * Override default values for `toString()` options. - */ - toStringDefaults?: ToStringOptions; -}; -export type CreateNodeOptions = { - /** - * During node construction, use anchors and aliases to keep strictly equal - * non-null objects as equivalent in YAML. - * - * Default: `true` - */ - aliasDuplicateObjects?: boolean; - /** - * Default prefix for anchors. - * - * Default: `'a'`, resulting in anchors `a1`, `a2`, etc. - */ - anchorPrefix?: string; - /** Force the top-level collection node to use flow style. */ - flow?: boolean; - /** - * Keep `undefined` object values when creating mappings, rather than - * discarding them. - * - * Default: `false` - */ - keepUndefined?: boolean | null; - onTagObj?: (tagObj: ScalarTag | CollectionTag) => void; - /** - * Specify the top-level collection type, e.g. `"!!omap"`. Note that this - * requires the corresponding tag to be available in this document's schema. - */ - tag?: string; -}; -export type ToJSOptions = { - /** - * Use Map rather than Object to represent mappings. - * - * Default: `false` - */ - mapAsMap?: boolean; - /** - * Prevent exponential entity expansion attacks by limiting data aliasing count; - * set to `-1` to disable checks; `0` disallows all alias nodes. - * - * Default: `100` - */ - maxAliasCount?: number; - /** - * If defined, called with the resolved `value` and reference `count` for - * each anchor in the document. - */ - onAnchor?: (value: unknown, count: number) => void; - /** - * Optional function that may filter or modify the output JS value - * - * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter - */ - reviver?: Reviver; -}; -export type ToStringOptions = { - /** - * Use block quote styles for scalar values where applicable. 
- * Set to `false` to disable block quotes completely. - * - * Default: `true` - */ - blockQuote?: boolean | 'folded' | 'literal'; - /** - * Enforce `'block'` or `'flow'` style on maps and sequences. - * Empty collections will always be stringified as `{}` or `[]`. - * - * Default: `'any'`, allowing each node to set its style separately - * with its `flow: boolean` (default `false`) property. - */ - collectionStyle?: 'any' | 'block' | 'flow'; - /** - * Comment stringifier. - * Output should be valid for the current schema. - * - * By default, empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ - commentString?: (comment: string) => string; - /** - * The default type of string literal used to stringify implicit key values. - * Output may use other types if required to fully represent the value. - * - * If `null`, the value of `defaultStringType` is used. - * - * Default: `null` - */ - defaultKeyType?: Scalar.Type | null; - /** - * The default type of string literal used to stringify values in general. - * Output may use other types if required to fully represent the value. - * - * Default: `'PLAIN'` - */ - defaultStringType?: Scalar.Type; - /** - * Include directives in the output. - * - * - If `true`, at least the document-start marker `---` is always included. - * This does not force the `%YAML` directive to be included. To do that, - * set `doc.directives.yaml.explicit = true`. - * - If `false`, no directives or marker is ever included. If using the `%TAG` - * directive, you are expected to include it manually in the stream before - * its use. - * - If `null`, directives and marker may be included if required. - * - * Default: `null` - */ - directives?: boolean | null; - /** - * Restrict double-quoted strings to use JSON-compatible syntax. - * - * Default: `false` - */ - doubleQuotedAsJSON?: boolean; - /** - * Minimum length for double-quoted strings to use multiple lines to - * represent the value. Ignored if `doubleQuotedAsJSON` is set. - * - * Default: `40` - */ - doubleQuotedMinMultiLineLength?: number; - /** - * String representation for `false`. - * With the core schema, use `'false'`, `'False'`, or `'FALSE'`. - * - * Default: `'false'` - */ - falseStr?: string; - /** - * When true, a single space of padding will be added inside the delimiters - * of non-empty single-line flow collections. - * - * Default: `true` - */ - flowCollectionPadding?: boolean; - /** - * The number of spaces to use when indenting code. - * - * Default: `2` - */ - indent?: number; - /** - * Whether block sequences should be indented. - * - * Default: `true` - */ - indentSeq?: boolean; - /** - * Maximum line width (set to `0` to disable folding). - * - * This is a soft limit, as only double-quoted semantics allow for inserting - * a line break in the middle of a word, as well as being influenced by the - * `minContentWidth` option. - * - * Default: `80` - */ - lineWidth?: number; - /** - * Minimum line width for highly-indented content (set to `0` to disable). - * - * Default: `20` - */ - minContentWidth?: number; - /** - * String representation for `null`. - * With the core schema, use `'null'`, `'Null'`, `'NULL'`, `'~'`, or an empty - * string `''`. - * - * Default: `'null'` - */ - nullStr?: string; - /** - * Require keys to be scalars and to use implicit rather than explicit notation. - * - * Default: `false` - */ - simpleKeys?: boolean; - /** - * Use 'single quote' rather than "double quote" where applicable. 
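A few of the ToStringOptions documented above, passed to stringify(); a hedged sketch of their effect, with only approximate output noted.

import { stringify } from 'yaml'

const data = { note: 'plain text', nums: [1, 2, 3] }
stringify(data)                                  // defaults: indent 2, lineWidth 80
stringify(data, { indent: 4, lineWidth: 0 })     // 4-space indent, line folding disabled
stringify(data, { collectionStyle: 'flow' })     // { note: plain text, nums: [ 1, 2, 3 ] }
stringify(data, { defaultStringType: 'QUOTE_DOUBLE', defaultKeyType: 'PLAIN' })
                                                 // note: "plain text"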
- * Set to `false` to disable single quotes completely. - * - * Default: `null` - */ - singleQuote?: boolean | null; - /** - * String representation for `true`. - * With the core schema, use `'true'`, `'True'`, or `'TRUE'`. - * - * Default: `'true'` - */ - trueStr?: string; - /** - * The anchor used by an alias must be defined before the alias node. As it's - * possible for the document to be modified manually, the order may be - * verified during stringification. - * - * Default: `'true'` - */ - verifyAliasOrder?: boolean; -}; diff --git a/node_modules/yaml/dist/parse/cst-scalar.d.ts b/node_modules/yaml/dist/parse/cst-scalar.d.ts deleted file mode 100644 index a7bd1d6..0000000 --- a/node_modules/yaml/dist/parse/cst-scalar.d.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { ErrorCode } from '../errors.js'; -import { Range } from '../nodes/Node.js'; -import type { Scalar } from '../nodes/Scalar.js'; -import type { BlockScalar, FlowScalar, SourceToken, Token } from './cst.js'; -/** - * If `token` is a CST flow or block scalar, determine its string value and a few other attributes. - * Otherwise, return `null`. - */ -export declare function resolveAsScalar(token: FlowScalar | BlockScalar, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): { - value: string; - type: Scalar.Type | null; - comment: string; - range: Range; -}; -export declare function resolveAsScalar(token: Token | null | undefined, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): { - value: string; - type: Scalar.Type | null; - comment: string; - range: Range; -} | null; -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -export declare function createScalarToken(value: string, context: { - end?: SourceToken[]; - implicitKey?: boolean; - indent: number; - inFlow?: boolean; - offset?: number; - type?: Scalar.Type; -}): BlockScalar | FlowScalar; -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. 
- * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -export declare function setScalarValue(token: Token, value: string, context?: { - afterKey?: boolean; - implicitKey?: boolean; - inFlow?: boolean; - type?: Scalar.Type; -}): void; diff --git a/node_modules/yaml/dist/parse/cst-scalar.js b/node_modules/yaml/dist/parse/cst-scalar.js deleted file mode 100644 index 81b8463..0000000 --- a/node_modules/yaml/dist/parse/cst-scalar.js +++ /dev/null @@ -1,218 +0,0 @@ -'use strict'; - -var resolveBlockScalar = require('../compose/resolve-block-scalar.js'); -var resolveFlowScalar = require('../compose/resolve-flow-scalar.js'); -var errors = require('../errors.js'); -var stringifyString = require('../stringify/stringifyString.js'); - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new errors.YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar.resolveBlockScalar({ options: { strict } }, token, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString.stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? 
[ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString.stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -exports.createScalarToken = createScalarToken; -exports.resolveAsScalar = resolveAsScalar; -exports.setScalarValue = setScalarValue; diff --git a/node_modules/yaml/dist/parse/cst-stringify.d.ts b/node_modules/yaml/dist/parse/cst-stringify.d.ts deleted file mode 100644 index dbf66d6..0000000 --- a/node_modules/yaml/dist/parse/cst-stringify.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import type { CollectionItem, Token } from './cst.js'; -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. 
- */ -export declare const stringify: (cst: Token | CollectionItem) => string; diff --git a/node_modules/yaml/dist/parse/cst-stringify.js b/node_modules/yaml/dist/parse/cst-stringify.js deleted file mode 100644 index 78e8c37..0000000 --- a/node_modules/yaml/dist/parse/cst-stringify.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict'; - -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -exports.stringify = stringify; diff --git a/node_modules/yaml/dist/parse/cst-visit.d.ts b/node_modules/yaml/dist/parse/cst-visit.d.ts deleted file mode 100644 index 4f21f05..0000000 --- a/node_modules/yaml/dist/parse/cst-visit.d.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { CollectionItem, Document } from './cst.js'; -export type VisitPath = readonly ['key' | 'value', number][]; -export type Visitor = (item: CollectionItem, path: VisitPath) => number | symbol | Visitor | void; -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. 
- * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -export declare function visit(cst: Document | CollectionItem, visitor: Visitor): void; -export declare namespace visit { - var BREAK: symbol; - var SKIP: symbol; - var REMOVE: symbol; - var itemAtPath: (cst: Document | CollectionItem, path: VisitPath) => CollectionItem | undefined; - var parentCollection: (cst: Document | CollectionItem, path: VisitPath) => import("./cst.js").BlockMap | import("./cst.js").BlockSequence | import("./cst.js").FlowCollection; -} diff --git a/node_modules/yaml/dist/parse/cst-visit.js b/node_modules/yaml/dist/parse/cst-visit.js deleted file mode 100644 index 9ceee93..0000000 --- a/node_modules/yaml/dist/parse/cst-visit.js +++ /dev/null @@ -1,99 +0,0 @@ -'use strict'; - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. 
- * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -exports.visit = visit; diff --git a/node_modules/yaml/dist/parse/cst.d.ts b/node_modules/yaml/dist/parse/cst.d.ts deleted file mode 100644 index 18bdb04..0000000 --- a/node_modules/yaml/dist/parse/cst.d.ts +++ /dev/null @@ -1,108 +0,0 @@ -export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js'; -export { stringify } from './cst-stringify.js'; -export { visit, Visitor, VisitPath } from './cst-visit.js'; -export interface SourceToken { - type: 'byte-order-mark' | 'doc-mode' | 'doc-start' | 'space' | 'comment' | 'newline' | 'directive-line' | 'anchor' | 'tag' | 'seq-item-ind' | 'explicit-key-ind' | 'map-value-ind' | 'flow-map-start' | 'flow-map-end' | 'flow-seq-start' | 'flow-seq-end' | 'flow-error-end' | 'comma' | 'block-scalar-header'; - offset: number; - indent: number; - source: string; -} -export interface ErrorToken { - type: 'error'; - offset: number; - source: string; - message: string; -} -export interface Directive { - type: 'directive'; - offset: number; - source: string; -} -export interface Document { - type: 'document'; - offset: number; - start: SourceToken[]; - value?: Token; - end?: SourceToken[]; -} -export interface DocumentEnd { - type: 'doc-end'; - offset: number; - source: string; - end?: SourceToken[]; -} -export interface FlowScalar { - type: 'alias' | 'scalar' | 'single-quoted-scalar' | 'double-quoted-scalar'; - offset: number; - indent: number; - source: string; - end?: SourceToken[]; -} -export interface BlockScalar { - type: 'block-scalar'; - offset: number; - indent: number; - props: Token[]; - source: string; -} -export interface BlockMap { - type: 'block-map'; - offset: number; - indent: number; - items: Array<{ - start: SourceToken[]; - explicitKey?: true; - key?: never; - sep?: never; - value?: never; - } | { - start: SourceToken[]; - explicitKey?: true; - key: Token | null; - sep: SourceToken[]; - value?: Token; - }>; -} -export interface BlockSequence { - type: 'block-seq'; - offset: number; - indent: number; - items: Array<{ - start: SourceToken[]; - key?: never; - sep?: never; - value?: Token; - }>; -} -export type CollectionItem = { - start: SourceToken[]; - key?: Token | null; - sep?: SourceToken[]; - value?: Token; -}; -export interface FlowCollection { - type: 'flow-collection'; - offset: number; - indent: number; - start: SourceToken; - items: CollectionItem[]; - end: SourceToken[]; -} -export type Token = SourceToken | ErrorToken | Directive | Document | DocumentEnd | 
FlowScalar | BlockScalar | BlockMap | BlockSequence | FlowCollection; -export type TokenType = SourceToken['type'] | DocumentEnd['type'] | FlowScalar['type']; -/** The byte order mark */ -export declare const BOM = "\uFEFF"; -/** Start of doc-mode */ -export declare const DOCUMENT = "\u0002"; -/** Unexpected end of flow-mode */ -export declare const FLOW_END = "\u0018"; -/** Next token is a scalar value */ -export declare const SCALAR = "\u001F"; -/** @returns `true` if `token` is a flow or block collection */ -export declare const isCollection: (token: Token | null | undefined) => token is BlockMap | BlockSequence | FlowCollection; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -export declare const isScalar: (token: Token | null | undefined) => token is FlowScalar | BlockScalar; -/** Get a printable representation of a lexer token */ -export declare function prettyToken(token: string): string; -/** Identify the type of a lexer token. May return `null` for unknown tokens. */ -export declare function tokenType(source: string): TokenType | null; diff --git a/node_modules/yaml/dist/parse/cst.js b/node_modules/yaml/dist/parse/cst.js deleted file mode 100644 index 613c229..0000000 --- a/node_modules/yaml/dist/parse/cst.js +++ /dev/null @@ -1,112 +0,0 @@ -'use strict'; - -var cstScalar = require('./cst-scalar.js'); -var cstStringify = require('./cst-stringify.js'); -var cstVisit = require('./cst-visit.js'); - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -exports.createScalarToken = cstScalar.createScalarToken; -exports.resolveAsScalar = cstScalar.resolveAsScalar; -exports.setScalarValue = cstScalar.setScalarValue; -exports.stringify = cstStringify.stringify; -exports.visit = cstVisit.visit; -exports.BOM = BOM; -exports.DOCUMENT = DOCUMENT; -exports.FLOW_END = FLOW_END; -exports.SCALAR = SCALAR; -exports.isCollection = isCollection; -exports.isScalar = isScalar; -exports.prettyToken = prettyToken; -exports.tokenType = tokenType; diff --git a/node_modules/yaml/dist/parse/lexer.d.ts b/node_modules/yaml/dist/parse/lexer.d.ts deleted file mode 100644 index e00fb52..0000000 --- a/node_modules/yaml/dist/parse/lexer.d.ts +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -export declare class Lexer { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - private atEnd; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - private blockScalarIndent; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - private blockScalarKeep; - /** Current input */ - private buffer; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - private flowKey; - /** Count of surrounding flow collection levels. */ - private flowLevel; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - private indentNext; - /** Indentation level of the current line. */ - private indentValue; - /** Position of the next \n character. 
*/ - private lineEndPos; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - private next; - /** A pointer to `buffer`; the current position of the lexer. */ - private pos; - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - lex(source: string, incomplete?: boolean): Generator; - private atLineEnd; - private charAt; - private continueScalar; - private getLine; - private hasChars; - private setNext; - private peek; - private parseNext; - private parseStream; - private parseLineStart; - private parseBlockStart; - private parseDocument; - private parseFlowCollection; - private parseQuotedScalar; - private parseBlockScalarHeader; - private parseBlockScalar; - private parsePlainScalar; - private pushCount; - private pushToIndex; - private pushIndicators; - private pushTag; - private pushNewline; - private pushSpaces; - private pushUntil; -} diff --git a/node_modules/yaml/dist/parse/lexer.js b/node_modules/yaml/dist/parse/lexer.js deleted file mode 100644 index 9ac766e..0000000 --- a/node_modules/yaml/dist/parse/lexer.js +++ /dev/null @@ -1,719 +0,0 @@ -'use strict'; - -var cst = require('./cst.js'); - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . - [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = new Set('0123456789ABCDEFabcdef'); -const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()"); -const flowIndicatorChars = new Set(',[]{}'); -const invalidAnchorChars = new Set(' ,[]{}\n\r\t'); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. 
- * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - if (typeof source !== 'string') - throw TypeError('source is not a string'); - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? 
this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === cst.BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - let cs = line.indexOf('#'); - while (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') { - dirEnd = cs - 1; - break; - } - else { - cs = line.indexOf('#', cs + 1); - } - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield cst.DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return s === '---' ? 'doc' : 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield cst.FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else { - this.indentNext = - this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext); - } - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - // Trailing insufficiently indented tabs are invalid. - // To catch that during parsing, we include them in the block scalar value. 
- let i = nl + 1; - ch = this.buffer[i]; - while (ch === ' ') - ch = this.buffer[++i]; - if (ch === '\t') { - while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n') - ch = this.buffer[++i]; - nl = i - 1; - } - else if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield cst.SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next))) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && flowIndicatorChars.has(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && flowIndicatorChars.has(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield cst.SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? 
i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.has(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.has(this.buffer[i + 1]) && - hexDigits.has(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -exports.Lexer = Lexer; diff --git a/node_modules/yaml/dist/parse/line-counter.d.ts b/node_modules/yaml/dist/parse/line-counter.d.ts deleted file mode 100644 index b469095..0000000 --- a/node_modules/yaml/dist/parse/line-counter.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -export declare class LineCounter { - lineStarts: number[]; - /** - * Should be called in ascending order. Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - addNewLine: (offset: number) => number; - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - linePos: (offset: number) => { - line: number; - col: number; - }; -} diff --git a/node_modules/yaml/dist/parse/line-counter.js b/node_modules/yaml/dist/parse/line-counter.js deleted file mode 100644 index 0e7383b..0000000 --- a/node_modules/yaml/dist/parse/line-counter.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. 
- */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -exports.LineCounter = LineCounter; diff --git a/node_modules/yaml/dist/parse/parser.d.ts b/node_modules/yaml/dist/parse/parser.d.ts deleted file mode 100644 index e7b8cfd..0000000 --- a/node_modules/yaml/dist/parse/parser.d.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { Token } from './cst.js'; -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... - * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -export declare class Parser { - private onNewLine?; - /** If true, space and sequence indicators count as indentation */ - private atNewLine; - /** If true, next token is a scalar value */ - private atScalar; - /** Current indentation level */ - private indent; - /** Current offset since the start of parsing */ - offset: number; - /** On the same line with a block map key */ - private onKeyLine; - /** Top indicates the node that's currently being built */ - stack: Token[]; - /** The source of the current token, set in parse() */ - private source; - /** The type of the current token, set in parse() */ - private type; - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine?: (offset: number) => void); - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - parse(source: string, incomplete?: boolean): Generator; - /** - * Advance the parser by the `source` of one lexical token. 
- */ - next(source: string): Generator; - private lexer; - /** Call at end of input to push out any remaining constructions */ - end(): Generator; - private get sourceToken(); - private step; - private peek; - private pop; - private stream; - private document; - private scalar; - private blockScalar; - private blockMap; - private blockSequence; - private flowCollection; - private flowScalar; - private startBlockValue; - private atIndentedComment; - private documentEnd; - private lineEnd; -} diff --git a/node_modules/yaml/dist/parse/parser.js b/node_modules/yaml/dist/parse/parser.js deleted file mode 100644 index 9e6ffe9..0000000 --- a/node_modules/yaml/dist/parse/parser.js +++ /dev/null @@ -1,958 +0,0 @@ -'use strict'; - -var cst = require('./cst.js'); -var lexer = require('./lexer.js'); - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... - * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). 
- */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new lexer.Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. - */ - *next(source) { - this.source = source; - if (process.env.LOG_TOKENS) - console.log('|', cst.prettyToken(source)); - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = cst.tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': 
- return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !it.explicitKey; - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if (findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) 
- this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atMapIndent = !this.onKeyLine && this.indent === map.indent; - const atNextItem = atMapIndent && - (it.sep || it.explicitKey) && - this.type !== 'seq-item-ind'; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !it.explicitKey) { - it.start.push(this.sourceToken); - it.explicitKey = true; - } - else if (atNextItem || it.value) { - 
start.push(this.sourceToken); - map.items.push({ start, explicitKey: true }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken], explicitKey: true }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (it.explicitKey) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - } - else if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key; - // @ts-expect-error type guard is wrong here - delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atMapIndent && bv.type !== 'block-seq') { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: [] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = 
this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, explicitKey: true }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -exports.Parser = Parser; diff --git a/node_modules/yaml/dist/public-api.d.ts b/node_modules/yaml/dist/public-api.d.ts deleted file mode 100644 index 2b771ca..0000000 --- a/node_modules/yaml/dist/public-api.d.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { Composer } from './compose/composer.js'; -import type { Reviver } from './doc/applyReviver.js'; -import { Document, Replacer } from './doc/Document.js'; -import type { Node, ParsedNode } from './nodes/Node.js'; -import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js'; -export interface EmptyStream extends Array, ReturnType { - empty: true; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it. 
- */ -export declare function parseAllDocuments<Contents extends Node = ParsedNode, Strict extends boolean = true>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Array<Contents extends ParsedNode ? Document.Parsed<Contents, Strict> : Document<Contents, false>> | EmptyStream; -/** Parse an input string into a single YAML.Document */ -export declare function parseDocument<Contents extends Node = ParsedNode, Strict extends boolean = true>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Contents extends ParsedNode ? Document.Parsed<Contents, Strict> : Document<Contents, false>; -/** - * Parse an input string into JavaScript. - * - * Only supports input consisting of a single YAML document; for multi-document - * support you should use `YAML.parseAllDocuments`. May throw on error, and may - * log warnings using `console.warn`. - * - * @param str - A string with YAML formatting. - * @param reviver - A reviver function, as in `JSON.parse()` - * @returns The value will match the type of the root value of the parsed YAML - * document, so Maps become objects, Sequences arrays, and scalars result in - * nulls, booleans, numbers and strings. - */ -export declare function parse(src: string, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any; -export declare function parse(src: string, reviver: Reviver, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any; -/** - * Stringify a value as a YAML document. - * - * @param replacer - A replacer array or function, as in `JSON.stringify()` - * @returns Will always include `\n` as the last character, as is expected of YAML documents. - */ -export declare function stringify(value: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions): string; -export declare function stringify(value: any, replacer?: Replacer | null, options?: string | number | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions)): string; diff --git a/node_modules/yaml/dist/public-api.js b/node_modules/yaml/dist/public-api.js deleted file mode 100644 index db76cef..0000000 --- a/node_modules/yaml/dist/public-api.js +++ /dev/null @@ -1,107 +0,0 @@ -'use strict'; - -var composer = require('./compose/composer.js'); -var Document = require('./doc/Document.js'); -var errors = require('./errors.js'); -var log = require('./log.js'); -var identity = require('./nodes/identity.js'); -var lineCounter = require('./parse/line-counter.js'); -var parser = require('./parse/parser.js'); - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; - return { lineCounter: lineCounter$1, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it.
- */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - const docs = Array.from(composer$1.compose(parser$1.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer$1.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? 
{}; - if (!keepUndefined) - return undefined; - } - if (identity.isDocument(value) && !_replacer) - return value.toString(options); - return new Document.Document(value, _replacer, options).toString(options); -} - -exports.parse = parse; -exports.parseAllDocuments = parseAllDocuments; -exports.parseDocument = parseDocument; -exports.stringify = stringify; diff --git a/node_modules/yaml/dist/schema/Schema.d.ts b/node_modules/yaml/dist/schema/Schema.d.ts deleted file mode 100644 index c87b8bf..0000000 --- a/node_modules/yaml/dist/schema/Schema.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { MAP, SCALAR, SEQ } from '../nodes/identity.js'; -import type { Pair } from '../nodes/Pair.js'; -import type { SchemaOptions, ToStringOptions } from '../options.js'; -import type { CollectionTag, ScalarTag } from './types.js'; -export declare class Schema { - compat: Array<CollectionTag | ScalarTag> | null; - knownTags: Record<string, CollectionTag | ScalarTag>; - name: string; - sortMapEntries: ((a: Pair, b: Pair) => number) | null; - tags: Array<CollectionTag | ScalarTag>; - toStringOptions: Readonly<ToStringOptions> | null; - readonly [MAP]: CollectionTag; - readonly [SCALAR]: ScalarTag; - readonly [SEQ]: CollectionTag; - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }: SchemaOptions); - clone(): Schema; -} diff --git a/node_modules/yaml/dist/schema/Schema.js b/node_modules/yaml/dist/schema/Schema.js deleted file mode 100644 index 3926547..0000000 --- a/node_modules/yaml/dist/schema/Schema.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var map = require('./common/map.js'); -var seq = require('./common/seq.js'); -var string = require('./common/string.js'); -var tags = require('./tags.js'); - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? tags.getTags(compat, 'compat') - : compat - ? tags.getTags(null, compat) - : null; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; - this.tags = tags.getTags(customTags, this.name, merge); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, identity.MAP, { value: map.map }); - Object.defineProperty(this, identity.SCALAR, { value: string.string }); - Object.defineProperty(this, identity.SEQ, { value: seq.seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ?
sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -exports.Schema = Schema; diff --git a/node_modules/yaml/dist/schema/common/map.d.ts b/node_modules/yaml/dist/schema/common/map.d.ts deleted file mode 100644 index 9b300f8..0000000 --- a/node_modules/yaml/dist/schema/common/map.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { CollectionTag } from '../types.js'; -export declare const map: CollectionTag; diff --git a/node_modules/yaml/dist/schema/common/map.js b/node_modules/yaml/dist/schema/common/map.js deleted file mode 100644 index 649c3b9..0000000 --- a/node_modules/yaml/dist/schema/common/map.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var YAMLMap = require('../../nodes/YAMLMap.js'); - -const map = { - collection: 'map', - default: true, - nodeClass: YAMLMap.YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!identity.isMap(map)) - onError('Expected a mapping for this tag'); - return map; - }, - createNode: (schema, obj, ctx) => YAMLMap.YAMLMap.from(schema, obj, ctx) -}; - -exports.map = map; diff --git a/node_modules/yaml/dist/schema/common/null.d.ts b/node_modules/yaml/dist/schema/common/null.d.ts deleted file mode 100644 index 66abea5..0000000 --- a/node_modules/yaml/dist/schema/common/null.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const nullTag: ScalarTag & { - test: RegExp; -}; diff --git a/node_modules/yaml/dist/schema/common/null.js b/node_modules/yaml/dist/schema/common/null.js deleted file mode 100644 index cb353a7..0000000 --- a/node_modules/yaml/dist/schema/common/null.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar.Scalar(null), - stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? 
source - : ctx.options.nullStr -}; - -exports.nullTag = nullTag; diff --git a/node_modules/yaml/dist/schema/common/seq.d.ts b/node_modules/yaml/dist/schema/common/seq.d.ts deleted file mode 100644 index c038d30..0000000 --- a/node_modules/yaml/dist/schema/common/seq.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { CollectionTag } from '../types.js'; -export declare const seq: CollectionTag; diff --git a/node_modules/yaml/dist/schema/common/seq.js b/node_modules/yaml/dist/schema/common/seq.js deleted file mode 100644 index 9c54bc9..0000000 --- a/node_modules/yaml/dist/schema/common/seq.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var YAMLSeq = require('../../nodes/YAMLSeq.js'); - -const seq = { - collection: 'seq', - default: true, - nodeClass: YAMLSeq.YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!identity.isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - }, - createNode: (schema, obj, ctx) => YAMLSeq.YAMLSeq.from(schema, obj, ctx) -}; - -exports.seq = seq; diff --git a/node_modules/yaml/dist/schema/common/string.d.ts b/node_modules/yaml/dist/schema/common/string.d.ts deleted file mode 100644 index 539c9b1..0000000 --- a/node_modules/yaml/dist/schema/common/string.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const string: ScalarTag; diff --git a/node_modules/yaml/dist/schema/common/string.js b/node_modules/yaml/dist/schema/common/string.js deleted file mode 100644 index 7601420..0000000 --- a/node_modules/yaml/dist/schema/common/string.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -var stringifyString = require('../../stringify/stringifyString.js'); - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -exports.string = string; diff --git a/node_modules/yaml/dist/schema/core/bool.d.ts b/node_modules/yaml/dist/schema/core/bool.d.ts deleted file mode 100644 index e4bdc4c..0000000 --- a/node_modules/yaml/dist/schema/core/bool.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const boolTag: ScalarTag & { - test: RegExp; -}; diff --git a/node_modules/yaml/dist/schema/core/bool.js b/node_modules/yaml/dist/schema/core/bool.js deleted file mode 100644 index 4def73c..0000000 --- a/node_modules/yaml/dist/schema/core/bool.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? 
ctx.options.trueStr : ctx.options.falseStr; - } -}; - -exports.boolTag = boolTag; diff --git a/node_modules/yaml/dist/schema/core/float.d.ts b/node_modules/yaml/dist/schema/core/float.d.ts deleted file mode 100644 index 22f0249..0000000 --- a/node_modules/yaml/dist/schema/core/float.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const floatNaN: ScalarTag; -export declare const floatExp: ScalarTag; -export declare const float: ScalarTag; diff --git a/node_modules/yaml/dist/schema/core/float.js b/node_modules/yaml/dist/schema/core/float.js deleted file mode 100644 index 8756446..0000000 --- a/node_modules/yaml/dist/schema/core/float.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; diff --git a/node_modules/yaml/dist/schema/core/int.d.ts b/node_modules/yaml/dist/schema/core/int.d.ts deleted file mode 100644 index 35e2d4b..0000000 --- a/node_modules/yaml/dist/schema/core/int.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const intOct: ScalarTag; -export declare const int: ScalarTag; -export declare const intHex: ScalarTag; diff --git a/node_modules/yaml/dist/schema/core/int.js b/node_modules/yaml/dist/schema/core/int.js deleted file mode 100644 index fe4c9ca..0000000 --- a/node_modules/yaml/dist/schema/core/int.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber.stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intHex = intHex; -exports.intOct = intOct; diff --git a/node_modules/yaml/dist/schema/core/schema.d.ts b/node_modules/yaml/dist/schema/core/schema.d.ts deleted file mode 100644 index f5bdd21..0000000 --- a/node_modules/yaml/dist/schema/core/schema.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const schema: (import("../types.js").CollectionTag | import("../types.js").ScalarTag)[]; diff --git a/node_modules/yaml/dist/schema/core/schema.js b/node_modules/yaml/dist/schema/core/schema.js deleted file mode 100644 index 6ab87f2..0000000 --- a/node_modules/yaml/dist/schema/core/schema.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict'; - -var map = require('../common/map.js'); -var _null = require('../common/null.js'); -var seq = require('../common/seq.js'); -var string = require('../common/string.js'); -var bool = require('./bool.js'); -var float = require('./float.js'); -var int = require('./int.js'); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.boolTag, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float -]; - -exports.schema = schema; diff --git a/node_modules/yaml/dist/schema/json-schema.d.ts b/node_modules/yaml/dist/schema/json-schema.d.ts deleted file mode 100644 index 6d51f40..0000000 --- a/node_modules/yaml/dist/schema/json-schema.d.ts +++ /dev/null @@ -1,69 +0,0 @@ -type JsonSchema = boolean | ArraySchema | ObjectSchema | NumberSchema | StringSchema; -type JsonType = 'array' | 'object' | 'string' | 'number' | 'integer' | 'boolean' | 'null'; -interface CommonSchema { - type?: JsonType | JsonType[]; - const?: unknown; - enum?: unknown[]; - format?: string; - allOf?: JsonSchema[]; - anyOf?: JsonSchema[]; - oneOf?: JsonSchema[]; - not?: JsonSchema; - if?: JsonSchema; - then?: JsonSchema; - else?: JsonSchema; - $id?: string; - $defs?: Record; - $anchor?: string; - $dynamicAnchor?: string; - $ref?: string; - $dynamicRef?: string; - $schema?: string; - $vocabulary?: Record; - $comment?: string; - default?: unknown; - deprecated?: boolean; - readOnly?: boolean; - writeOnly?: boolean; - title?: string; - description?: string; - examples?: unknown[]; -} -interface ArraySchema extends CommonSchema { - prefixItems?: JsonSchema[]; - items?: JsonSchema; - contains?: JsonSchema; - unevaluatedItems?: JsonSchema; - maxItems?: number; - minItems?: number; - uniqueItems?: boolean; - maxContains?: number; - minContains?: number; -} -interface ObjectSchema extends CommonSchema 
{ - properties?: Record<string, JsonSchema>; - patternProperties?: Record<string, JsonSchema>; - additionalProperties?: JsonSchema; - propertyNames?: JsonSchema; - unevaluatedProperties?: JsonSchema; - maxProperties?: number; - minProperties?: number; - required?: string[]; - dependentRequired?: Record<string, string[]>; - dependentSchemas?: Record<string, JsonSchema>; -} -interface StringSchema extends CommonSchema { - maxLength?: number; - minLength?: number; - pattern?: string; - contentEncoding?: string; - contentMediaType?: string; - contentSchema?: JsonSchema; -} -interface NumberSchema extends CommonSchema { - multipleOf?: number; - maximum?: number; - exclusiveMaximum?: number; - minimum?: number; - exclusiveMinimum?: number; -} diff --git a/node_modules/yaml/dist/schema/json/schema.d.ts b/node_modules/yaml/dist/schema/json/schema.d.ts deleted file mode 100644 index 76a4301..0000000 --- a/node_modules/yaml/dist/schema/json/schema.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import { CollectionTag, ScalarTag } from '../types.js'; -export declare const schema: (CollectionTag | ScalarTag)[]; diff --git a/node_modules/yaml/dist/schema/json/schema.js b/node_modules/yaml/dist/schema/json/schema.js deleted file mode 100644 index ccb871a..0000000 --- a/node_modules/yaml/dist/schema/json/schema.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var map = require('../common/map.js'); -var seq = require('../common/seq.js'); - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^true$|^false$/, - resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ?
value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); - -exports.schema = schema; diff --git a/node_modules/yaml/dist/schema/tags.d.ts b/node_modules/yaml/dist/schema/tags.d.ts deleted file mode 100644 index ad22aba..0000000 --- a/node_modules/yaml/dist/schema/tags.d.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { SchemaOptions } from '../options.js'; -import type { CollectionTag, ScalarTag } from './types.js'; -declare const tagsByName: { - binary: ScalarTag; - bool: ScalarTag & { - test: RegExp; - }; - float: ScalarTag; - floatExp: ScalarTag; - floatNaN: ScalarTag; - floatTime: ScalarTag; - int: ScalarTag; - intHex: ScalarTag; - intOct: ScalarTag; - intTime: ScalarTag; - map: CollectionTag; - merge: ScalarTag & { - identify(value: unknown): boolean; - test: RegExp; - }; - null: ScalarTag & { - test: RegExp; - }; - omap: CollectionTag; - pairs: CollectionTag; - seq: CollectionTag; - set: CollectionTag; - timestamp: ScalarTag & { - test: RegExp; - }; -}; -export type TagId = keyof typeof tagsByName; -export type Tags = Array; -export declare const coreKnownTags: { - 'tag:yaml.org,2002:binary': ScalarTag; - 'tag:yaml.org,2002:merge': ScalarTag & { - identify(value: unknown): boolean; - test: RegExp; - }; - 'tag:yaml.org,2002:omap': CollectionTag; - 'tag:yaml.org,2002:pairs': CollectionTag; - 'tag:yaml.org,2002:set': CollectionTag; - 'tag:yaml.org,2002:timestamp': ScalarTag & { - test: RegExp; - }; -}; -export declare function getTags(customTags: SchemaOptions['customTags'] | undefined, schemaName: string, addMergeTag?: boolean): (CollectionTag | ScalarTag)[]; -export {}; diff --git a/node_modules/yaml/dist/schema/tags.js b/node_modules/yaml/dist/schema/tags.js deleted file mode 100644 index bd67d86..0000000 --- a/node_modules/yaml/dist/schema/tags.js +++ /dev/null @@ -1,99 +0,0 @@ -'use strict'; - -var map = require('./common/map.js'); -var _null = require('./common/null.js'); -var seq = require('./common/seq.js'); -var string = require('./common/string.js'); -var bool = require('./core/bool.js'); -var float = require('./core/float.js'); -var int = require('./core/int.js'); -var schema = require('./core/schema.js'); -var schema$1 = require('./json/schema.js'); -var binary = require('./yaml-1.1/binary.js'); -var merge = require('./yaml-1.1/merge.js'); -var omap = require('./yaml-1.1/omap.js'); -var pairs = require('./yaml-1.1/pairs.js'); -var schema$2 = require('./yaml-1.1/schema.js'); -var set = require('./yaml-1.1/set.js'); -var timestamp = require('./yaml-1.1/timestamp.js'); - -const schemas = new Map([ - ['core', schema.schema], - ['failsafe', [map.map, seq.seq, string.string]], - ['json', schema$1.schema], - ['yaml11', schema$2.schema], - ['yaml-1.1', schema$2.schema] -]); -const tagsByName = { - binary: binary.binary, - bool: bool.boolTag, - float: float.float, - floatExp: float.floatExp, - floatNaN: float.floatNaN, - floatTime: timestamp.floatTime, - int: int.int, - intHex: int.intHex, - intOct: int.intOct, - intTime: timestamp.intTime, - map: map.map, - merge: merge.merge, - null: _null.nullTag, - omap: omap.omap, - pairs: 
pairs.pairs, - seq: seq.seq, - set: set.set, - timestamp: timestamp.timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary.binary, - 'tag:yaml.org,2002:merge': merge.merge, - 'tag:yaml.org,2002:omap': omap.omap, - 'tag:yaml.org,2002:pairs': pairs.pairs, - 'tag:yaml.org,2002:set': set.set, - 'tag:yaml.org,2002:timestamp': timestamp.timestamp -}; -function getTags(customTags, schemaName, addMergeTag) { - const schemaTags = schemas.get(schemaName); - if (schemaTags && !customTags) { - return addMergeTag && !schemaTags.includes(merge.merge) - ? schemaTags.concat(merge.merge) - : schemaTags.slice(); - } - let tags = schemaTags; - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - if (addMergeTag) - tags = tags.concat(merge.merge); - return tags.reduce((tags, tag) => { - const tagObj = typeof tag === 'string' ? tagsByName[tag] : tag; - if (!tagObj) { - const tagName = JSON.stringify(tag); - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown custom tag ${tagName}; use one of ${keys}`); - } - if (!tags.includes(tagObj)) - tags.push(tagObj); - return tags; - }, []); -} - -exports.coreKnownTags = coreKnownTags; -exports.getTags = getTags; diff --git a/node_modules/yaml/dist/schema/types.d.ts b/node_modules/yaml/dist/schema/types.d.ts deleted file mode 100644 index 58a943c..0000000 --- a/node_modules/yaml/dist/schema/types.d.ts +++ /dev/null @@ -1,92 +0,0 @@ -import type { CreateNodeContext } from '../doc/createNode.js'; -import type { Node } from '../nodes/Node.js'; -import type { Scalar } from '../nodes/Scalar.js'; -import type { YAMLMap } from '../nodes/YAMLMap.js'; -import type { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { ParseOptions } from '../options.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import type { Schema } from './Schema.js'; -interface TagBase { - /** - * An optional factory function, used e.g. by collections when wrapping JS objects as AST nodes. - */ - createNode?: (schema: Schema, value: unknown, ctx: CreateNodeContext) => Node; - /** - * If `true`, allows for values to be stringified without - * an explicit tag together with `test`. - * If `'key'`, this only applies if the value is used as a mapping key. - * For most cases, it's unlikely that you'll actually want to use this, - * even if you first think you do. - */ - default?: boolean | 'key'; - /** - * If a tag has multiple forms that should be parsed and/or stringified - * differently, use `format` to identify them. - */ - format?: string; - /** - * Used by `YAML.createNode` to detect your data type, e.g. using `typeof` or - * `instanceof`. - */ - identify?: (value: unknown) => boolean; - /** - * The identifier for your data type, with which its stringified form will be - * prefixed. Should either be a !-prefixed local `!tag`, or a fully qualified - * `tag:domain,date:foo`. - */ - tag: string; -} -export interface ScalarTag extends TagBase { - collection?: never; - nodeClass?: never; - /** - * Turns a value into an AST node. 
- * If returning a non-`Node` value, the output will be wrapped as a `Scalar`. - */ - resolve(value: string, onError: (message: string) => void, options: ParseOptions): unknown; - /** - * Optional function stringifying a Scalar node. If your data includes a - * suitable `.toString()` method, you can probably leave this undefined and - * use the default stringifier. - * - * @param item The node being stringified. - * @param ctx Contains the stringifying context variables. - * @param onComment Callback to signal that the stringifier includes the - * item's comment in its output. - * @param onChompKeep Callback to signal that the output uses a block scalar - * type with the `+` chomping indicator. - */ - stringify?: (item: Scalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void) => string; - /** - * Together with `default` allows for values to be stringified without an - * explicit tag and detected using a regular expression. For most cases, it's - * unlikely that you'll actually want to use these, even if you first think - * you do. - */ - test?: RegExp; -} -export interface CollectionTag extends TagBase { - stringify?: never; - test?: never; - /** The source collection type supported by this tag. */ - collection: 'map' | 'seq'; - /** - * The `Node` child class that implements this tag. - * If set, used to select this tag when stringifying. - * - * If the class provides a static `from` method, then that - * will be used if the tag object doesn't have a `createNode` method. - */ - nodeClass?: { - new (schema?: Schema): Node; - from?: (schema: Schema, obj: unknown, ctx: CreateNodeContext) => Node; - }; - /** - * Turns a value into an AST node. - * If returning a non-`Node` value, the output will be wrapped as a `Scalar`. - * - * Note: this is required if nodeClass is not provided. 
- */ - resolve?: (value: YAMLMap.Parsed | YAMLSeq.Parsed, onError: (message: string) => void, options: ParseOptions) => unknown; -} -export {}; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts deleted file mode 100644 index 2054970..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const binary: ScalarTag; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/binary.js b/node_modules/yaml/dist/schema/yaml-1.1/binary.js deleted file mode 100644 index 38fa498..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/binary.js +++ /dev/null @@ -1,68 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var stringifyString = require('../../stringify/stringifyString.js'); - -const binary = { - identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.Scalar.BLOCK_LITERAL; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? 
'\n' : ' '); - } - return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -exports.binary = binary; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts deleted file mode 100644 index 587b55b..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const trueTag: ScalarTag & { - test: RegExp; -}; -export declare const falseTag: ScalarTag & { - test: RegExp; -}; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/bool.js b/node_modules/yaml/dist/schema/yaml-1.1/bool.js deleted file mode 100644 index d987952..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/bool.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar.Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/, - resolve: () => new Scalar.Scalar(false), - stringify: boolStringify -}; - -exports.falseTag = falseTag; -exports.trueTag = trueTag; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts deleted file mode 100644 index 22f0249..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const floatNaN: ScalarTag; -export declare const floatExp: ScalarTag; -export declare const float: ScalarTag; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/float.js b/node_modules/yaml/dist/schema/yaml-1.1/float.js deleted file mode 100644 index 39f1eb0..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/float.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts deleted file mode 100644 index 3d92f37..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const intBin: ScalarTag; -export declare const intOct: ScalarTag; -export declare const int: ScalarTag; -export declare const intHex: ScalarTag; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/int.js b/node_modules/yaml/dist/schema/yaml-1.1/int.js deleted file mode 100644 index fdf47ca..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/int.js +++ /dev/null @@ -1,76 +0,0 @@ -'use strict'; - -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber.stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intBin = intBin; -exports.intHex = intHex; -exports.intOct = intOct; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/merge.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/merge.d.ts deleted file mode 100644 index 47ce50a..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/merge.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import type { ToJSContext } from '../../nodes/toJS.js'; -import type { MapLike } from '../../nodes/YAMLMap.js'; -import type { ScalarTag } from '../types.js'; -export declare const merge: ScalarTag & { - identify(value: unknown): boolean; - test: RegExp; -}; -export declare const isMergeKey: (ctx: ToJSContext | undefined, key: unknown) => boolean | undefined; -export declare function addMergeToJSMap(ctx: ToJSContext | undefined, map: MapLike, value: unknown): void; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/merge.js b/node_modules/yaml/dist/schema/yaml-1.1/merge.js deleted file mode 100644 index ef2ff32..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/merge.js +++ /dev/null @@ -1,68 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var Scalar = require('../../nodes/Scalar.js'); - -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. -// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -const MERGE_KEY = '<<'; -const merge = { - identify: value => value === MERGE_KEY || - (typeof value === 'symbol' && value.description === MERGE_KEY), - default: 'key', - tag: 'tag:yaml.org,2002:merge', - test: /^<<$/, - resolve: () => Object.assign(new Scalar.Scalar(Symbol(MERGE_KEY)), { - addToJSMap: addMergeToJSMap - }), - stringify: () => MERGE_KEY -}; -const isMergeKey = (ctx, key) => (merge.identify(key) || - (identity.isScalar(key) && - (!key.type || key.type === Scalar.Scalar.PLAIN) && - merge.identify(key.value))) && - ctx?.doc.schema.tags.some(tag => tag.tag === merge.tag && tag.default); -function addMergeToJSMap(ctx, map, value) { - value = ctx && identity.isAlias(value) ? 
value.resolve(ctx.doc) : value; - if (identity.isSeq(value)) - for (const it of value.items) - mergeValue(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeValue(ctx, map, it); - else - mergeValue(ctx, map, value); -} -function mergeValue(ctx, map, value) { - const source = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value; - if (!identity.isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} - -exports.addMergeToJSMap = addMergeToJSMap; -exports.isMergeKey = isMergeKey; -exports.merge = merge; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts deleted file mode 100644 index 95b03c7..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { ToJSContext } from '../../nodes/toJS.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; -import { CreateNodeContext } from '../../util.js'; -import type { Schema } from '../Schema.js'; -import { CollectionTag } from '../types.js'; -export declare class YAMLOMap extends YAMLSeq { - static tag: string; - constructor(); - add: (pair: import("../../index.js").Pair | { - key: any; - value: any; - }, overwrite?: boolean) => void; - delete: (key: unknown) => boolean; - get: { - (key: unknown, keepScalar: true): import("../../index.js").Scalar | undefined; - (key: unknown, keepScalar?: false): any; - (key: unknown, keepScalar?: boolean): any; - }; - has: (key: unknown) => boolean; - set: (key: any, value: any) => void; - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. - */ - toJSON(_?: unknown, ctx?: ToJSContext): unknown[]; - static from(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLOMap; -} -export declare const omap: CollectionTag; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/omap.js b/node_modules/yaml/dist/schema/yaml-1.1/omap.js deleted file mode 100644 index 3ca141d..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/omap.js +++ /dev/null @@ -1,77 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var toJS = require('../../nodes/toJS.js'); -var YAMLMap = require('../../nodes/YAMLMap.js'); -var YAMLSeq = require('../../nodes/YAMLSeq.js'); -var pairs = require('./pairs.js'); - -class YAMLOMap extends YAMLSeq.YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
- */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (identity.isPair(pair)) { - key = toJS.toJS(pair.key, '', ctx); - value = toJS.toJS(pair.value, key, ctx); - } - else { - key = toJS.toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } - static from(schema, iterable, ctx) { - const pairs$1 = pairs.createPairs(schema, iterable, ctx); - const omap = new this(); - omap.items = pairs$1.items; - return omap; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs$1 = pairs.resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs$1.items) { - if (identity.isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs$1); - }, - createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx) -}; - -exports.YAMLOMap = YAMLOMap; -exports.omap = omap; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts deleted file mode 100644 index 20bb907..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { CreateNodeContext } from '../../doc/createNode.js'; -import type { ParsedNode } from '../../nodes/Node.js'; -import { Pair } from '../../nodes/Pair.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; -import type { Schema } from '../../schema/Schema.js'; -import type { CollectionTag } from '../types.js'; -export declare function resolvePairs(seq: YAMLSeq.Parsed> | YAMLMap.Parsed, onError: (message: string) => void): YAMLSeq.Parsed>; -export declare function createPairs(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLSeq; -export declare const pairs: CollectionTag; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/pairs.js b/node_modules/yaml/dist/schema/yaml-1.1/pairs.js deleted file mode 100644 index aa32e0f..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/pairs.js +++ /dev/null @@ -1,82 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var Pair = require('../../nodes/Pair.js'); -var Scalar = require('../../nodes/Scalar.js'); -var YAMLSeq = require('../../nodes/YAMLSeq.js'); - -function resolvePairs(seq, onError) { - if (identity.isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (identity.isPair(item)) - continue; - else if (identity.isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = identity.isPair(item) ? 
item : new Pair.Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq.YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else { - throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`); - } - } - else { - key = it; - } - pairs.items.push(Pair.createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -exports.createPairs = createPairs; -exports.pairs = pairs; -exports.resolvePairs = resolvePairs; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts deleted file mode 100644 index f5bdd21..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const schema: (import("../types.js").CollectionTag | import("../types.js").ScalarTag)[]; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/schema.js b/node_modules/yaml/dist/schema/yaml-1.1/schema.js deleted file mode 100644 index 2ea4b73..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/schema.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -var map = require('../common/map.js'); -var _null = require('../common/null.js'); -var seq = require('../common/seq.js'); -var string = require('../common/string.js'); -var binary = require('./binary.js'); -var bool = require('./bool.js'); -var float = require('./float.js'); -var int = require('./int.js'); -var merge = require('./merge.js'); -var omap = require('./omap.js'); -var pairs = require('./pairs.js'); -var set = require('./set.js'); -var timestamp = require('./timestamp.js'); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.trueTag, - bool.falseTag, - int.intBin, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float, - binary.binary, - merge.merge, - omap.omap, - pairs.pairs, - set.set, - timestamp.intTime, - timestamp.floatTime, - timestamp.timestamp -]; - -exports.schema = schema; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts deleted file mode 100644 index 2054fb7..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Pair } from '../../nodes/Pair.js'; -import { Scalar } from '../../nodes/Scalar.js'; -import { ToJSContext } from '../../nodes/toJS.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; -import type { Schema } from '../../schema/Schema.js'; -import type { StringifyContext } from '../../stringify/stringify.js'; -import { CreateNodeContext } from '../../util.js'; -import type { CollectionTag } from '../types.js'; -export declare class YAMLSet extends YAMLMap | null> { - static tag: string; - constructor(schema?: Schema); - add(key: T | Pair | null> | { - key: T; - value: Scalar | null; - 
}): void; - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key: unknown, keepPair?: boolean): any; - set(key: T, value: boolean): void; - /** @deprecated Will throw; `value` must be boolean */ - set(key: T, value: null): void; - toJSON(_?: unknown, ctx?: ToJSContext): any; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; - static from(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLSet; -} -export declare const set: CollectionTag; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/set.js b/node_modules/yaml/dist/schema/yaml-1.1/set.js deleted file mode 100644 index 650c250..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/set.js +++ /dev/null @@ -1,96 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var Pair = require('../../nodes/Pair.js'); -var YAMLMap = require('../../nodes/YAMLMap.js'); - -class YAMLSet extends YAMLMap.YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (identity.isPair(key)) - pair = key; - else if (key && - typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair.Pair(key.key, null); - else - pair = new Pair.Pair(key, null); - const prev = YAMLMap.findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = YAMLMap.findPair(this.items, key); - return !keepPair && identity.isPair(pair) - ? identity.isScalar(pair.key) - ? pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = YAMLMap.findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair.Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } - static from(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new this(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(Pair.createPair(value, null, ctx)); - } - return set; - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx), - resolve(map, onError) { - if (identity.isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - } -}; - -exports.YAMLSet = YAMLSet; -exports.set = set; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts deleted file mode 
100644 index 0c1d2d4..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const intTime: ScalarTag; -export declare const floatTime: ScalarTag; -export declare const timestamp: ScalarTag & { - test: RegExp; -}; diff --git a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js deleted file mode 100644 index 7163570..0000000 --- a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; - -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. - */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber.stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => String(n).padStart(2, '0')) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? 
Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/(T00:00:00)?\.000Z$/, '') -}; - -exports.floatTime = floatTime; -exports.intTime = intTime; -exports.timestamp = timestamp; diff --git a/node_modules/yaml/dist/stringify/foldFlowLines.d.ts b/node_modules/yaml/dist/stringify/foldFlowLines.d.ts deleted file mode 100644 index aac3cac..0000000 --- a/node_modules/yaml/dist/stringify/foldFlowLines.d.ts +++ /dev/null @@ -1,34 +0,0 @@ -export declare const FOLD_FLOW = "flow"; -export declare const FOLD_BLOCK = "block"; -export declare const FOLD_QUOTED = "quoted"; -/** - * `'block'` prevents more-indented lines from being folded; - * `'quoted'` allows for `\` escapes, including escaped newlines - */ -export type FoldMode = 'flow' | 'block' | 'quoted'; -export interface FoldOptions { - /** - * Accounts for leading contents on the first line, defaulting to - * `indent.length` - */ - indentAtStart?: number; - /** Default: `80` */ - lineWidth?: number; - /** - * Allow highly indented lines to stretch the line width or indent content - * from the start. - * - * Default: `20` - */ - minContentWidth?: number; - /** Called once if the text is folded */ - onFold?: () => void; - /** Called once if any line of text exceeds lineWidth characters */ - onOverflow?: () => void; -} -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. - */ -export declare function foldFlowLines(text: string, indent: string, mode?: FoldMode, { indentAtStart, lineWidth, minContentWidth, onFold, onOverflow }?: FoldOptions): string; diff --git a/node_modules/yaml/dist/stringify/foldFlowLines.js b/node_modules/yaml/dist/stringify/foldFlowLines.js deleted file mode 100644 index 9c61058..0000000 --- a/node_modules/yaml/dist/stringify/foldFlowLines.js +++ /dev/null @@ -1,151 +0,0 @@ -'use strict'; - -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - if (lineWidth < minContentWidth) - minContentWidth = 0; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i, indent.length); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i, indent.length); - end = i + indent.length + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i, indent) { - let end = i; - let start = i + 1; - let ch = text[start]; - while (ch === ' ' || ch === '\t') { - if (i < start + indent) { - ch = text[++i]; - } - else { - do { - ch = text[++i]; - } while (ch && ch !== '\n'); - end = i; - start = i + 1; - ch = text[start]; - } - } - return end; -} - -exports.FOLD_BLOCK = FOLD_BLOCK; -exports.FOLD_FLOW = FOLD_FLOW; -exports.FOLD_QUOTED = FOLD_QUOTED; -exports.foldFlowLines = foldFlowLines; diff --git a/node_modules/yaml/dist/stringify/stringify.d.ts b/node_modules/yaml/dist/stringify/stringify.d.ts deleted file mode 100644 index f408b75..0000000 --- a/node_modules/yaml/dist/stringify/stringify.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Alias } from '../nodes/Alias.js'; -import type { ToStringOptions } from '../options.js'; -export type StringifyContext = { - actualString?: boolean; - allNullValues?: boolean; - anchors: Set; - doc: Document; - forceBlockIndent?: boolean; - implicitKey?: boolean; - indent: string; - indentStep: string; - indentAtStart?: number; - inFlow: boolean | null; - inStringifyKey?: boolean; - flowCollectionPadding: string; - options: Readonly>>; - resolvedAliases?: Set; -}; -export declare function createStringifyContext(doc: Document, options: ToStringOptions): StringifyContext; -export declare function stringify(item: unknown, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; diff --git a/node_modules/yaml/dist/stringify/stringify.js b/node_modules/yaml/dist/stringify/stringify.js deleted file mode 100644 index e10c08a..0000000 --- a/node_modules/yaml/dist/stringify/stringify.js +++ /dev/null @@ -1,132 +0,0 @@ -'use strict'; - -var anchors = require('../doc/anchors.js'); -var identity = require('../nodes/identity.js'); -var stringifyComment = require('./stringifyComment.js'); -var stringifyString = require('./stringifyString.js'); - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment.stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - flowCollectionPadding: true, - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; - break; - default: - 
inFlow = null; - } - return { - anchors: new Set(), - doc, - flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (identity.isScalar(item)) { - obj = item.value; - let match = tags.filter(t => t.identify?.(obj)); - if (match.length > 1) { - const testMatch = match.filter(t => t.test); - if (testMatch.length > 0) - match = testMatch; - } - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (identity.isScalar(node) || identity.isCollection(node)) && node.anchor; - if (anchor && anchors.anchorIsValid(anchor)) { - anchors$1.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (identity.isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (identity.isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = identity.isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : identity.isScalar(node) - ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return identity.isScalar(node) || str[0] === '{' || str[0] === '[' - ? 
`${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -exports.createStringifyContext = createStringifyContext; -exports.stringify = stringify; diff --git a/node_modules/yaml/dist/stringify/stringifyCollection.d.ts b/node_modules/yaml/dist/stringify/stringifyCollection.d.ts deleted file mode 100644 index 207d703..0000000 --- a/node_modules/yaml/dist/stringify/stringifyCollection.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Collection } from '../nodes/Collection.js'; -import { StringifyContext } from './stringify.js'; -interface StringifyCollectionOptions { - blockItemPrefix: string; - flowChars: { - start: '{'; - end: '}'; - } | { - start: '['; - end: ']'; - }; - itemIndent: string; - onChompKeep?: () => void; - onComment?: () => void; -} -export declare function stringifyCollection(collection: Readonly, ctx: StringifyContext, options: StringifyCollectionOptions): string; -export {}; diff --git a/node_modules/yaml/dist/stringify/stringifyCollection.js b/node_modules/yaml/dist/stringify/stringifyCollection.js deleted file mode 100644 index 6efffc5..0000000 --- a/node_modules/yaml/dist/stringify/stringifyCollection.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var stringify = require('./stringify.js'); -var stringifyComment = require('./stringifyComment.js'); - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (identity.isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (identity.isPair(item)) { - const ik = identity.isNode(item.key) ? item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? 
`\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + stringifyComment.indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) { - const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (identity.isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (identity.isPair(item)) { - const ik = identity.isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = identity.isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik?.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify.stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - const { start, end } = flowChars; - if (lines.length === 0) { - return start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth; - } - if (reqNewline) { - let str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - return `${str}\n${indent}${end}`; - } - else { - return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`; - } - } -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = stringifyComment.indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -exports.stringifyCollection = stringifyCollection; diff --git a/node_modules/yaml/dist/stringify/stringifyComment.d.ts b/node_modules/yaml/dist/stringify/stringifyComment.d.ts deleted file mode 100644 index 9fcf48d..0000000 --- a/node_modules/yaml/dist/stringify/stringifyComment.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. 
- */ -export declare const stringifyComment: (str: string) => string; -export declare function indentComment(comment: string, indent: string): string; -export declare const lineComment: (str: string, indent: string, comment: string) => string; diff --git a/node_modules/yaml/dist/stringify/stringifyComment.js b/node_modules/yaml/dist/stringify/stringifyComment.js deleted file mode 100644 index 26bf361..0000000 --- a/node_modules/yaml/dist/stringify/stringifyComment.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict'; - -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? '' : ' ') + comment; - -exports.indentComment = indentComment; -exports.lineComment = lineComment; -exports.stringifyComment = stringifyComment; diff --git a/node_modules/yaml/dist/stringify/stringifyDocument.d.ts b/node_modules/yaml/dist/stringify/stringifyDocument.d.ts deleted file mode 100644 index 1eeb177..0000000 --- a/node_modules/yaml/dist/stringify/stringifyDocument.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Node } from '../nodes/Node.js'; -import type { ToStringOptions } from '../options.js'; -export declare function stringifyDocument(doc: Readonly>, options: ToStringOptions): string; diff --git a/node_modules/yaml/dist/stringify/stringifyDocument.js b/node_modules/yaml/dist/stringify/stringifyDocument.js deleted file mode 100644 index fb9d73c..0000000 --- a/node_modules/yaml/dist/stringify/stringifyDocument.js +++ /dev/null @@ -1,87 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var stringify = require('./stringify.js'); -var stringifyComment = require('./stringifyComment.js'); - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = stringify.createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(stringifyComment.indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (identity.isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(stringifyComment.indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? 
undefined : () => (chompKeep = true); - let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += stringifyComment.lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify.stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(stringifyComment.indentComment(cs, '')); - } - else { - lines.push(`... ${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(stringifyComment.indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -exports.stringifyDocument = stringifyDocument; diff --git a/node_modules/yaml/dist/stringify/stringifyNumber.d.ts b/node_modules/yaml/dist/stringify/stringifyNumber.d.ts deleted file mode 100644 index 3c14df1..0000000 --- a/node_modules/yaml/dist/stringify/stringifyNumber.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Scalar } from '../nodes/Scalar.js'; -export declare function stringifyNumber({ format, minFractionDigits, tag, value }: Scalar): string; diff --git a/node_modules/yaml/dist/stringify/stringifyNumber.js b/node_modules/yaml/dist/stringify/stringifyNumber.js deleted file mode 100644 index 4118ff6..0000000 --- a/node_modules/yaml/dist/stringify/stringifyNumber.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict'; - -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -exports.stringifyNumber = stringifyNumber; diff --git a/node_modules/yaml/dist/stringify/stringifyPair.d.ts b/node_modules/yaml/dist/stringify/stringifyPair.d.ts deleted file mode 100644 index c512149..0000000 --- a/node_modules/yaml/dist/stringify/stringifyPair.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type { Pair } from '../nodes/Pair.js'; -import { StringifyContext } from './stringify.js'; -export declare function stringifyPair({ key, value }: Readonly, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; diff --git a/node_modules/yaml/dist/stringify/stringifyPair.js b/node_modules/yaml/dist/stringify/stringifyPair.js deleted file mode 100644 index 716ea9a..0000000 --- a/node_modules/yaml/dist/stringify/stringifyPair.js +++ /dev/null @@ -1,152 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); -var stringify = require('./stringify.js'); -var stringifyComment = require('./stringifyComment.js'); - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (identity.isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (identity.isCollection(key) || (!identity.isNode(key) && typeof key === 'object')) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - identity.isCollection(key) || - (identity.isScalar(key) - ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? ${str}`; - if (keyComment && !keyCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - str = `? 
${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - let vsb, vcb, valueComment; - if (identity.isNode(value)) { - vsb = !!value.spaceBefore; - vcb = value.commentBefore; - valueComment = value.comment; - } - else { - vsb = false; - vcb = null; - valueComment = null; - if (value && typeof value === 'object') - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && identity.isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - identity.isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substring(2); - } - let valueCommentDone = false; - const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (keyComment || vsb || vcb) { - ws = vsb ? '\n' : ''; - if (vcb) { - const cs = commentString(vcb); - ws += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; - } - if (valueStr === '' && !ctx.inFlow) { - if (ws === '\n') - ws = '\n\n'; - } - else { - ws += `\n${ctx.indent}`; - } - } - else if (!explicitKey && identity.isCollection(value)) { - const vs0 = valueStr[0]; - const nl0 = valueStr.indexOf('\n'); - const hasNewline = nl0 !== -1; - const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0; - if (hasNewline || !flow) { - let hasPropsLine = false; - if (hasNewline && (vs0 === '&' || vs0 === '!')) { - let sp0 = valueStr.indexOf(' '); - if (vs0 === '&' && - sp0 !== -1 && - sp0 < nl0 && - valueStr[sp0 + 1] === '!') { - sp0 = valueStr.indexOf(' ', sp0 + 1); - } - if (sp0 === -1 || nl0 < sp0) - hasPropsLine = true; - } - if (!hasPropsLine) - ws = `\n${ctx.indent}`; - } - } - else if (valueStr === '' || valueStr[0] === '\n') { - ws = ''; - } - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -exports.stringifyPair = stringifyPair; diff --git a/node_modules/yaml/dist/stringify/stringifyString.d.ts b/node_modules/yaml/dist/stringify/stringifyString.d.ts deleted file mode 100644 index 017cc4e..0000000 --- a/node_modules/yaml/dist/stringify/stringifyString.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import type { StringifyContext } from './stringify.js'; -interface StringifyScalar { - value: string; - comment?: string | null; - type?: string; -} -export declare function stringifyString(item: Scalar | StringifyScalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; -export {}; diff --git a/node_modules/yaml/dist/stringify/stringifyString.js b/node_modules/yaml/dist/stringify/stringifyString.js deleted file mode 100644 index 67252ce..0000000 --- a/node_modules/yaml/dist/stringify/stringifyString.js +++ /dev/null @@ -1,339 +0,0 @@ -'use strict'; - -var Scalar = require('../nodes/Scalar.js'); -var foldFlowLines = require('./foldFlowLines.js'); - -const getFoldOptions = (ctx, isBlock) => ({ - indentAtStart: isBlock ? 
ctx.indent.length : ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. -const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? 
res - : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -// The negative lookbehind avoids a polynomial search, -// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind -let blockEndNewlines; -try { - blockEndNewlines = new RegExp('(^|(?\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(blockEndNewlines, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - // Leading | or > is added later - let header = (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (!literal) { - const foldedValue = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - let literalFallback = false; - const foldOptions = getFoldOptions(ctx, true); - if (blockQuote !== 'folded' && type !== Scalar.Scalar.BLOCK_FOLDED) { - foldOptions.onOverflow = () => { - literalFallback = true; - }; - } - const body = foldFlowLines.foldFlowLines(`${start}${foldedValue}${end}`, indent, foldFlowLines.FOLD_BLOCK, foldOptions); - if (!literalFallback) - return `>${header}\n${indent}${body}`; - } - value = value.replace(/\n+/g, `$&${indent}`); - return `|${header}\n${indent}${start}${value}${end}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, indentStep, inFlow } = ctx; - if ((implicitKey && value.includes('\n')) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' 
- // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (containsDocumentMarker(value)) { - if (indent === '') { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - else if (implicitKey && indent === indentStep) { - return quotedString(value, ctx); - } - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. - if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.Scalar.BLOCK_FOLDED: - case Scalar.Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? 
quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -exports.stringifyString = stringifyString; diff --git a/node_modules/yaml/dist/test-events.d.ts b/node_modules/yaml/dist/test-events.d.ts deleted file mode 100644 index d1a2348..0000000 --- a/node_modules/yaml/dist/test-events.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export declare function testEvents(src: string): { - events: string[]; - error: unknown; -}; diff --git a/node_modules/yaml/dist/test-events.js b/node_modules/yaml/dist/test-events.js deleted file mode 100644 index f38d336..0000000 --- a/node_modules/yaml/dist/test-events.js +++ /dev/null @@ -1,134 +0,0 @@ -'use strict'; - -var identity = require('./nodes/identity.js'); -var publicApi = require('./public-api.js'); -var visit = require('./visit.js'); - -const scalarChar = { - BLOCK_FOLDED: '>', - BLOCK_LITERAL: '|', - PLAIN: ':', - QUOTE_DOUBLE: '"', - QUOTE_SINGLE: "'" -}; -function anchorExists(doc, anchor) { - let found = false; - visit.visit(doc, { - Value(_key, node) { - if (node.anchor === anchor) { - found = true; - return visit.visit.BREAK; - } - } - }); - return found; -} -// test harness for yaml-test-suite event tests -function testEvents(src) { - const docs = publicApi.parseAllDocuments(src); - const errDoc = docs.find(doc => doc.errors.length > 0); - const error = errDoc ? errDoc.errors[0].message : null; - const events = ['+STR']; - try { - for (let i = 0; i < docs.length; ++i) { - const doc = docs[i]; - let root = doc.contents; - if (Array.isArray(root)) - root = root[0]; - const [rootStart] = doc.range || [0]; - const error = doc.errors[0]; - if (error && (!error.pos || error.pos[0] < rootStart)) - throw new Error(); - let docStart = '+DOC'; - if (doc.directives.docStart) - docStart += ' ---'; - else if (doc.contents && - doc.contents.range[2] === doc.contents.range[0] && - !doc.contents.anchor && - !doc.contents.tag) - continue; - events.push(docStart); - addEvents(events, doc, error?.pos[0] ?? -1, root); - let docEnd = '-DOC'; - if (doc.directives.docEnd) - docEnd += ' ...'; - events.push(docEnd); - } - } - catch (e) { - return { events, error: error ?? e }; - } - events.push('-STR'); - return { events, error }; -} -function addEvents(events, doc, errPos, node) { - if (!node) { - events.push('=VAL :'); - return; - } - if (errPos !== -1 && identity.isNode(node) && node.range[0] >= errPos) - throw new Error(); - let props = ''; - let anchor = identity.isScalar(node) || identity.isCollection(node) ? node.anchor : undefined; - if (anchor) { - if (/\d$/.test(anchor)) { - const alt = anchor.replace(/\d$/, ''); - if (anchorExists(doc, alt)) - anchor = alt; - } - props = ` &${anchor}`; - } - if (identity.isNode(node) && node.tag) - props += ` <${node.tag}>`; - if (identity.isMap(node)) { - const ev = node.flow ? 
'+MAP {}' : '+MAP'; - events.push(`${ev}${props}`); - node.items.forEach(({ key, value }) => { - addEvents(events, doc, errPos, key); - addEvents(events, doc, errPos, value); - }); - events.push('-MAP'); - } - else if (identity.isSeq(node)) { - const ev = node.flow ? '+SEQ []' : '+SEQ'; - events.push(`${ev}${props}`); - node.items.forEach(item => { - addEvents(events, doc, errPos, item); - }); - events.push('-SEQ'); - } - else if (identity.isPair(node)) { - events.push(`+MAP${props}`); - addEvents(events, doc, errPos, node.key); - addEvents(events, doc, errPos, node.value); - events.push('-MAP'); - } - else if (identity.isAlias(node)) { - let alias = node.source; - if (alias && /\d$/.test(alias)) { - const alt = alias.replace(/\d$/, ''); - if (anchorExists(doc, alt)) - alias = alt; - } - events.push(`=ALI${props} *${alias}`); - } - else { - const scalar = scalarChar[String(node.type)]; - if (!scalar) - throw new Error(`Unexpected node type ${node.type}`); - const value = node.source - .replace(/\\/g, '\\\\') - .replace(/\0/g, '\\0') - .replace(/\x07/g, '\\a') - .replace(/\x08/g, '\\b') - .replace(/\t/g, '\\t') - .replace(/\n/g, '\\n') - .replace(/\v/g, '\\v') - .replace(/\f/g, '\\f') - .replace(/\r/g, '\\r') - .replace(/\x1b/g, '\\e'); - events.push(`=VAL${props} ${scalar}${value}`); - } -} - -exports.testEvents = testEvents; diff --git a/node_modules/yaml/dist/util.d.ts b/node_modules/yaml/dist/util.d.ts deleted file mode 100644 index 3d1b198..0000000 --- a/node_modules/yaml/dist/util.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -export { createNode, CreateNodeContext } from './doc/createNode.js'; -export { debug, LogLevelId, warn } from './log.js'; -export { createPair } from './nodes/Pair.js'; -export { findPair } from './nodes/YAMLMap.js'; -export { toJS, ToJSContext } from './nodes/toJS.js'; -export { map as mapTag } from './schema/common/map.js'; -export { seq as seqTag } from './schema/common/seq.js'; -export { string as stringTag } from './schema/common/string.js'; -export { foldFlowLines, FoldOptions } from './stringify/foldFlowLines'; -export { StringifyContext } from './stringify/stringify.js'; -export { stringifyNumber } from './stringify/stringifyNumber.js'; -export { stringifyString } from './stringify/stringifyString.js'; diff --git a/node_modules/yaml/dist/util.js b/node_modules/yaml/dist/util.js deleted file mode 100644 index 2e0e5cd..0000000 --- a/node_modules/yaml/dist/util.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict'; - -var createNode = require('./doc/createNode.js'); -var log = require('./log.js'); -var Pair = require('./nodes/Pair.js'); -var YAMLMap = require('./nodes/YAMLMap.js'); -var toJS = require('./nodes/toJS.js'); -var map = require('./schema/common/map.js'); -var seq = require('./schema/common/seq.js'); -var string = require('./schema/common/string.js'); -var foldFlowLines = require('./stringify/foldFlowLines.js'); -var stringifyNumber = require('./stringify/stringifyNumber.js'); -var stringifyString = require('./stringify/stringifyString.js'); - - - -exports.createNode = createNode.createNode; -exports.debug = log.debug; -exports.warn = log.warn; -exports.createPair = Pair.createPair; -exports.findPair = YAMLMap.findPair; -exports.toJS = toJS.toJS; -exports.mapTag = map.map; -exports.seqTag = seq.seq; -exports.stringTag = string.string; -exports.foldFlowLines = foldFlowLines.foldFlowLines; -exports.stringifyNumber = stringifyNumber.stringifyNumber; -exports.stringifyString = stringifyString.stringifyString; diff --git a/node_modules/yaml/dist/visit.d.ts 
b/node_modules/yaml/dist/visit.d.ts deleted file mode 100644 index 7a27bfc..0000000 --- a/node_modules/yaml/dist/visit.d.ts +++ /dev/null @@ -1,102 +0,0 @@ -import type { Document } from './doc/Document.js'; -import type { Alias } from './nodes/Alias.js'; -import { Node } from './nodes/Node.js'; -import type { Pair } from './nodes/Pair.js'; -import type { Scalar } from './nodes/Scalar.js'; -import type { YAMLMap } from './nodes/YAMLMap.js'; -import type { YAMLSeq } from './nodes/YAMLSeq.js'; -export type visitorFn = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair; -export type visitor = visitorFn | { - Alias?: visitorFn; - Collection?: visitorFn; - Map?: visitorFn; - Node?: visitorFn; - Pair?: visitorFn; - Scalar?: visitorFn; - Seq?: visitorFn; - Value?: visitorFn; -}; -export type asyncVisitorFn = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair | Promise; -export type asyncVisitor = asyncVisitorFn | { - Alias?: asyncVisitorFn; - Collection?: asyncVisitorFn; - Map?: asyncVisitorFn; - Node?: asyncVisitorFn; - Pair?: asyncVisitorFn; - Scalar?: asyncVisitorFn; - Seq?: asyncVisitorFn; - Value?: asyncVisitorFn; -}; -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -export declare function visit(node: Node | Document | null, visitor: visitor): void; -export declare namespace visit { - var BREAK: symbol; - var SKIP: symbol; - var REMOVE: symbol; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. 
- * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -export declare function visitAsync(node: Node | Document | null, visitor: asyncVisitor): Promise; -export declare namespace visitAsync { - var BREAK: symbol; - var SKIP: symbol; - var REMOVE: symbol; -} diff --git a/node_modules/yaml/dist/visit.js b/node_modules/yaml/dist/visit.js deleted file mode 100644 index f126e54..0000000 --- a/node_modules/yaml/dist/visit.js +++ /dev/null @@ -1,236 +0,0 @@ -'use strict'; - -var identity = require('./nodes/identity.js'); - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (identity.isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (identity.isNode(ctrl) || identity.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (identity.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (identity.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (identity.isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (identity.isNode(ctrl) || identity.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (identity.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (identity.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (identity.isMap(node)) - return visitor.Map?.(key, node, path); - if (identity.isSeq(node)) - return visitor.Seq?.(key, node, path); - if (identity.isPair(node)) - return visitor.Pair?.(key, node, path); - if (identity.isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (identity.isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (identity.isCollection(parent)) { - parent.items[key] = node; - } - else if (identity.isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (identity.isDocument(parent)) { - parent.contents = node; - } - else { - const pt = identity.isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -exports.visit = visit; -exports.visitAsync = visitAsync; diff --git a/node_modules/yaml/package.json b/node_modules/yaml/package.json deleted file mode 100644 index 35966a3..0000000 --- a/node_modules/yaml/package.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "name": "yaml", - "version": "2.6.1", - "license": "ISC", - "author": "Eemeli Aro ", - "repository": "github:eemeli/yaml", - "description": "JavaScript parser and stringifier for YAML", - "keywords": [ - "YAML", - "parser", - "stringifier" - ], - "homepage": "https://eemeli.org/yaml/", - "files": [ - "browser/", - "dist/", - "util.js" - ], - "type": "commonjs", - "main": "./dist/index.js", - "bin": "./bin.mjs", - "browser": { - "./dist/index.js": "./browser/index.js", - "./dist/util.js": "./browser/dist/util.js", - "./util.js": "./browser/dist/util.js" - }, - "exports": { - ".": { - "types": "./dist/index.d.ts", - "node": "./dist/index.js", - "default": "./browser/index.js" - }, - "./package.json": "./package.json", - "./util": { - "types": "./dist/util.d.ts", - "node": "./dist/util.js", - "default": "./browser/dist/util.js" - } - }, - "scripts": { - "build": "npm run build:node && npm run build:browser", - "build:browser": "rollup -c config/rollup.browser-config.mjs", - "build:node": "rollup -c config/rollup.node-config.mjs", - "clean": "git clean -fdxe node_modules", - "lint": "eslint config/ src/", - "prettier": "prettier --write .", - "prestart": "rollup --sourcemap -c config/rollup.node-config.mjs", - "start": "node --enable-source-maps -i -e 'YAML=require(\"./dist/index.js\");const{parse,parseDocument,parseAllDocuments}=YAML'", - "test": "jest --config config/jest.config.js", - "test:all": "npm test && npm run test:types && npm run test:dist && npm run test:dist:types", - "test:browsers": "cd playground && npm test", - "test:dist": "npm run build:node && jest --config config/jest.config.js", - "test:dist:types": "tsc --allowJs --moduleResolution node --noEmit --target es5 dist/index.js", - "test:types": "tsc --noEmit && tsc --noEmit -p tests/tsconfig.json", - "docs:install": "cd docs-slate && bundle install", - "predocs:deploy": "node docs/prepare-docs.mjs", - "docs:deploy": "cd docs-slate && ./deploy.sh", - "predocs": "node docs/prepare-docs.mjs", - "docs": "cd docs-slate && bundle exec middleman server", - "preversion": "npm test && npm run build", - "prepublishOnly": "npm run clean && npm test && npm run build" - }, - "browserslist": "defaults, not ie 11", - "prettier": { - "arrowParens": "avoid", - "semi": false, - "singleQuote": true, - "trailingComma": "none" - }, - "devDependencies": { - "@babel/core": "^7.12.10", - "@babel/plugin-transform-typescript": "^7.12.17", - "@babel/preset-env": "^7.12.11", - "@eslint/js": "^9.9.1", - "@rollup/plugin-babel": "^6.0.3", - "@rollup/plugin-replace": "^5.0.2", - "@rollup/plugin-typescript": "^11.0.0", - "@types/jest": "^29.2.4", - "@types/node": "^20.11.20", - "babel-jest": "^29.0.1", - "cross-env": "^7.0.3", - "eslint": "^9.9.1", - "eslint-config-prettier": "^9.0.0", - "fast-check": "^2.12.0", - "jest": "^29.0.1", - "jest-ts-webcompat-resolver": "^1.0.0", - "prettier": "^3.0.2", - "rollup": "^4.12.0", - "tslib": "^2.1.0", - "typescript": "^5.0.3", - "typescript-eslint": "^8.4.0" - }, - "engines": { - "node": ">= 14" - } -} diff --git a/node_modules/yaml/util.js b/node_modules/yaml/util.js deleted file mode 100644 index 070103f..0000000 --- a/node_modules/yaml/util.js +++ /dev/null @@ -1,2 +0,0 @@ -// 
Re-exporter for Node.js < 12.16.0 -module.exports = require('./dist/util.js') diff --git a/node_modules/yargs-parser/CHANGELOG.md b/node_modules/yargs-parser/CHANGELOG.md deleted file mode 100644 index 584eb86..0000000 --- a/node_modules/yargs-parser/CHANGELOG.md +++ /dev/null @@ -1,308 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. - -## [21.1.1](https://github.com/yargs/yargs-parser/compare/yargs-parser-v21.1.0...yargs-parser-v21.1.1) (2022-08-04) - - -### Bug Fixes - -* **typescript:** ignore .cts files during publish ([#454](https://github.com/yargs/yargs-parser/issues/454)) ([d69f9c3](https://github.com/yargs/yargs-parser/commit/d69f9c3a91c3ad2f9494d0a94e29a8b76c41b81b)), closes [#452](https://github.com/yargs/yargs-parser/issues/452) - -## [21.1.0](https://github.com/yargs/yargs-parser/compare/yargs-parser-v21.0.1...yargs-parser-v21.1.0) (2022-08-03) - - -### Features - -* allow the browser build to be imported ([#443](https://github.com/yargs/yargs-parser/issues/443)) ([a89259f](https://github.com/yargs/yargs-parser/commit/a89259ff41d6f5312b3ce8a30bef343a993f395a)) - - -### Bug Fixes - -* **halt-at-non-option:** prevent known args from being parsed when "unknown-options-as-args" is enabled ([#438](https://github.com/yargs/yargs-parser/issues/438)) ([c474bc1](https://github.com/yargs/yargs-parser/commit/c474bc10c3aa0ae864b95e5722730114ef15f573)) -* node version check now uses process.versions.node ([#450](https://github.com/yargs/yargs-parser/issues/450)) ([d07bcdb](https://github.com/yargs/yargs-parser/commit/d07bcdbe43075f7201fbe8a08e491217247fe1f1)) -* parse options ending with 3+ hyphens ([#434](https://github.com/yargs/yargs-parser/issues/434)) ([4f1060b](https://github.com/yargs/yargs-parser/commit/4f1060b50759fadbac3315c5117b0c3d65b0a7d8)) - -### [21.0.1](https://github.com/yargs/yargs-parser/compare/yargs-parser-v21.0.0...yargs-parser-v21.0.1) (2022-02-27) - - -### Bug Fixes - -* return deno env object ([#432](https://github.com/yargs/yargs-parser/issues/432)) ([b00eb87](https://github.com/yargs/yargs-parser/commit/b00eb87b4860a890dd2dab0d6058241bbfd2b3ec)) - -## [21.0.0](https://www.github.com/yargs/yargs-parser/compare/yargs-parser-v20.2.9...yargs-parser-v21.0.0) (2021-11-15) - - -### ⚠ BREAKING CHANGES - -* drops support for 10 (#421) - -### Bug Fixes - -* esm json import ([#416](https://www.github.com/yargs/yargs-parser/issues/416)) ([90f970a](https://www.github.com/yargs/yargs-parser/commit/90f970a6482dd4f5b5eb18d38596dd6f02d73edf)) -* parser should preserve inner quotes ([#407](https://www.github.com/yargs/yargs-parser/issues/407)) ([ae11f49](https://www.github.com/yargs/yargs-parser/commit/ae11f496a8318ea8885aa25015d429b33713c314)) - - -### Code Refactoring - -* drops support for 10 ([#421](https://www.github.com/yargs/yargs-parser/issues/421)) ([3aaf878](https://www.github.com/yargs/yargs-parser/commit/3aaf8784f5c7f2aec6108c1c6a55537fa7e3b5c1)) - -### [20.2.9](https://www.github.com/yargs/yargs-parser/compare/yargs-parser-v20.2.8...yargs-parser-v20.2.9) (2021-06-20) - - -### Bug Fixes - -* **build:** fixed automated release pipeline ([1fe9135](https://www.github.com/yargs/yargs-parser/commit/1fe9135884790a083615419b2861683e2597dac3)) - -### [20.2.8](https://www.github.com/yargs/yargs-parser/compare/yargs-parser-v20.2.7...yargs-parser-v20.2.8) (2021-06-20) - - -### Bug Fixes - -* **locale:** Turkish camelize and 
decamelize issues with toLocaleLowerCase/toLocaleUpperCase ([2617303](https://www.github.com/yargs/yargs-parser/commit/261730383e02448562f737b94bbd1f164aed5143)) -* **perf:** address slow parse when using unknown-options-as-args ([#394](https://www.github.com/yargs/yargs-parser/issues/394)) ([441f059](https://www.github.com/yargs/yargs-parser/commit/441f059d585d446551068ad213db79ac91daf83a)) -* **string-utils:** detect [0,1] ranged values as numbers ([#388](https://www.github.com/yargs/yargs-parser/issues/388)) ([efcc32c](https://www.github.com/yargs/yargs-parser/commit/efcc32c2d6b09aba31abfa2db9bd947befe5586b)) - -### [20.2.7](https://www.github.com/yargs/yargs-parser/compare/v20.2.6...v20.2.7) (2021-03-10) - - -### Bug Fixes - -* **deno:** force release for Deno ([6687c97](https://www.github.com/yargs/yargs-parser/commit/6687c972d0f3ca7865a97908dde3080b05f8b026)) - -### [20.2.6](https://www.github.com/yargs/yargs-parser/compare/v20.2.5...v20.2.6) (2021-02-22) - - -### Bug Fixes - -* **populate--:** -- should always be array ([#354](https://www.github.com/yargs/yargs-parser/issues/354)) ([585ae8f](https://www.github.com/yargs/yargs-parser/commit/585ae8ffad74cc02974f92d788e750137fd65146)) - -### [20.2.5](https://www.github.com/yargs/yargs-parser/compare/v20.2.4...v20.2.5) (2021-02-13) - - -### Bug Fixes - -* do not lowercase camel cased string ([#348](https://www.github.com/yargs/yargs-parser/issues/348)) ([5f4da1f](https://www.github.com/yargs/yargs-parser/commit/5f4da1f17d9d50542d2aaa206c9806ce3e320335)) - -### [20.2.4](https://www.github.com/yargs/yargs-parser/compare/v20.2.3...v20.2.4) (2020-11-09) - - -### Bug Fixes - -* **deno:** address import issues in Deno ([#339](https://www.github.com/yargs/yargs-parser/issues/339)) ([3b54e5e](https://www.github.com/yargs/yargs-parser/commit/3b54e5eef6e9a7b7c6eec7c12bab3ba3b8ba8306)) - -### [20.2.3](https://www.github.com/yargs/yargs-parser/compare/v20.2.2...v20.2.3) (2020-10-16) - - -### Bug Fixes - -* **exports:** node 13.0 and 13.1 require the dotted object form _with_ a string fallback ([#336](https://www.github.com/yargs/yargs-parser/issues/336)) ([3ae7242](https://www.github.com/yargs/yargs-parser/commit/3ae7242040ff876d28dabded60ac226e00150c88)) - -### [20.2.2](https://www.github.com/yargs/yargs-parser/compare/v20.2.1...v20.2.2) (2020-10-14) - - -### Bug Fixes - -* **exports:** node 13.0-13.6 require a string fallback ([#333](https://www.github.com/yargs/yargs-parser/issues/333)) ([291aeda](https://www.github.com/yargs/yargs-parser/commit/291aeda06b685b7a015d83bdf2558e180b37388d)) - -### [20.2.1](https://www.github.com/yargs/yargs-parser/compare/v20.2.0...v20.2.1) (2020-10-01) - - -### Bug Fixes - -* **deno:** update types for deno ^1.4.0 ([#330](https://www.github.com/yargs/yargs-parser/issues/330)) ([0ab92e5](https://www.github.com/yargs/yargs-parser/commit/0ab92e50b090f11196334c048c9c92cecaddaf56)) - -## [20.2.0](https://www.github.com/yargs/yargs-parser/compare/v20.1.0...v20.2.0) (2020-09-21) - - -### Features - -* **string-utils:** export looksLikeNumber helper ([#324](https://www.github.com/yargs/yargs-parser/issues/324)) ([c8580a2](https://www.github.com/yargs/yargs-parser/commit/c8580a2327b55f6342acecb6e72b62963d506750)) - - -### Bug Fixes - -* **unknown-options-as-args:** convert positionals that look like numbers ([#326](https://www.github.com/yargs/yargs-parser/issues/326)) ([f85ebb4](https://www.github.com/yargs/yargs-parser/commit/f85ebb4face9d4b0f56147659404cbe0002f3dad)) - -## 
[20.1.0](https://www.github.com/yargs/yargs-parser/compare/v20.0.0...v20.1.0) (2020-09-20) - - -### Features - -* adds parse-positional-numbers configuration ([#321](https://www.github.com/yargs/yargs-parser/issues/321)) ([9cec00a](https://www.github.com/yargs/yargs-parser/commit/9cec00a622251292ffb7dce6f78f5353afaa0d4c)) - - -### Bug Fixes - -* **build:** update release-please; make labels kick off builds ([#323](https://www.github.com/yargs/yargs-parser/issues/323)) ([09f448b](https://www.github.com/yargs/yargs-parser/commit/09f448b4cd66e25d2872544718df46dab8af062a)) - -## [20.0.0](https://www.github.com/yargs/yargs-parser/compare/v19.0.4...v20.0.0) (2020-09-09) - - -### ⚠ BREAKING CHANGES - -* do not ship type definitions (#318) - -### Bug Fixes - -* only strip camel case if hyphenated ([#316](https://www.github.com/yargs/yargs-parser/issues/316)) ([95a9e78](https://www.github.com/yargs/yargs-parser/commit/95a9e785127b9bbf2d1db1f1f808ca1fb100e82a)), closes [#315](https://www.github.com/yargs/yargs-parser/issues/315) - - -### Code Refactoring - -* do not ship type definitions ([#318](https://www.github.com/yargs/yargs-parser/issues/318)) ([8fbd56f](https://www.github.com/yargs/yargs-parser/commit/8fbd56f1d0b6c44c30fca62708812151ca0ce330)) - -### [19.0.4](https://www.github.com/yargs/yargs-parser/compare/v19.0.3...v19.0.4) (2020-08-27) - - -### Bug Fixes - -* **build:** fixing publication ([#310](https://www.github.com/yargs/yargs-parser/issues/310)) ([5d3c6c2](https://www.github.com/yargs/yargs-parser/commit/5d3c6c29a9126248ba601920d9cf87c78e161ff5)) - -### [19.0.3](https://www.github.com/yargs/yargs-parser/compare/v19.0.2...v19.0.3) (2020-08-27) - - -### Bug Fixes - -* **build:** switch to action for publish ([#308](https://www.github.com/yargs/yargs-parser/issues/308)) ([5c2f305](https://www.github.com/yargs/yargs-parser/commit/5c2f30585342bcd8aaf926407c863099d256d174)) - -### [19.0.2](https://www.github.com/yargs/yargs-parser/compare/v19.0.1...v19.0.2) (2020-08-27) - - -### Bug Fixes - -* **types:** envPrefix should be optional ([#305](https://www.github.com/yargs/yargs-parser/issues/305)) ([ae3f180](https://www.github.com/yargs/yargs-parser/commit/ae3f180e14df2de2fd962145f4518f9aa0e76523)) - -### [19.0.1](https://www.github.com/yargs/yargs-parser/compare/v19.0.0...v19.0.1) (2020-08-09) - - -### Bug Fixes - -* **build:** push tag created for deno ([2186a14](https://www.github.com/yargs/yargs-parser/commit/2186a14989749887d56189867602e39e6679f8b0)) - -## [19.0.0](https://www.github.com/yargs/yargs-parser/compare/v18.1.3...v19.0.0) (2020-08-09) - - -### ⚠ BREAKING CHANGES - -* adds support for ESM and Deno (#295) -* **ts:** projects using `@types/yargs-parser` may see variations in type definitions. -* drops Node 6. 
begin following Node.js LTS schedule (#278) - -### Features - -* adds support for ESM and Deno ([#295](https://www.github.com/yargs/yargs-parser/issues/295)) ([195bc4a](https://www.github.com/yargs/yargs-parser/commit/195bc4a7f20c2a8f8e33fbb6ba96ef6e9a0120a1)) -* expose camelCase and decamelize helpers ([#296](https://www.github.com/yargs/yargs-parser/issues/296)) ([39154ce](https://www.github.com/yargs/yargs-parser/commit/39154ceb5bdcf76b5f59a9219b34cedb79b67f26)) -* **deps:** update to latest camelcase/decamelize ([#281](https://www.github.com/yargs/yargs-parser/issues/281)) ([8931ab0](https://www.github.com/yargs/yargs-parser/commit/8931ab08f686cc55286f33a95a83537da2be5516)) - - -### Bug Fixes - -* boolean numeric short option ([#294](https://www.github.com/yargs/yargs-parser/issues/294)) ([f600082](https://www.github.com/yargs/yargs-parser/commit/f600082c959e092076caf420bbbc9d7a231e2418)) -* raise permission error for Deno if config load fails ([#298](https://www.github.com/yargs/yargs-parser/issues/298)) ([1174e2b](https://www.github.com/yargs/yargs-parser/commit/1174e2b3f0c845a1cd64e14ffc3703e730567a84)) -* **deps:** update dependency decamelize to v3 ([#274](https://www.github.com/yargs/yargs-parser/issues/274)) ([4d98698](https://www.github.com/yargs/yargs-parser/commit/4d98698bc6767e84ec54a0842908191739be73b7)) -* **types:** switch back to using Partial types ([#293](https://www.github.com/yargs/yargs-parser/issues/293)) ([bdc80ba](https://www.github.com/yargs/yargs-parser/commit/bdc80ba59fa13bc3025ce0a85e8bad9f9da24ea7)) - - -### Build System - -* drops Node 6. begin following Node.js LTS schedule ([#278](https://www.github.com/yargs/yargs-parser/issues/278)) ([9014ed7](https://www.github.com/yargs/yargs-parser/commit/9014ed722a32768b96b829e65a31705db5c1458a)) - - -### Code Refactoring - -* **ts:** move index.js to TypeScript ([#292](https://www.github.com/yargs/yargs-parser/issues/292)) ([f78d2b9](https://www.github.com/yargs/yargs-parser/commit/f78d2b97567ac4828624406e420b4047c710b789)) - -### [18.1.3](https://www.github.com/yargs/yargs-parser/compare/v18.1.2...v18.1.3) (2020-04-16) - - -### Bug Fixes - -* **setArg:** options using camel-case and dot-notation populated twice ([#268](https://www.github.com/yargs/yargs-parser/issues/268)) ([f7e15b9](https://www.github.com/yargs/yargs-parser/commit/f7e15b9800900b9856acac1a830a5f35847be73e)) - -### [18.1.2](https://www.github.com/yargs/yargs-parser/compare/v18.1.1...v18.1.2) (2020-03-26) - - -### Bug Fixes - -* **array, nargs:** support -o=--value and --option=--value format ([#262](https://www.github.com/yargs/yargs-parser/issues/262)) ([41d3f81](https://www.github.com/yargs/yargs-parser/commit/41d3f8139e116706b28de9b0de3433feb08d2f13)) - -### [18.1.1](https://www.github.com/yargs/yargs-parser/compare/v18.1.0...v18.1.1) (2020-03-16) - - -### Bug Fixes - -* \_\_proto\_\_ will now be replaced with \_\_\_proto\_\_\_ in parse ([#258](https://www.github.com/yargs/yargs-parser/issues/258)), patching a potential -prototype pollution vulnerability. 
This was reported by the Snyk Security Research Team.([63810ca](https://www.github.com/yargs/yargs-parser/commit/63810ca1ae1a24b08293a4d971e70e058c7a41e2)) - -## [18.1.0](https://www.github.com/yargs/yargs-parser/compare/v18.0.0...v18.1.0) (2020-03-07) - - -### Features - -* introduce single-digit boolean aliases ([#255](https://www.github.com/yargs/yargs-parser/issues/255)) ([9c60265](https://www.github.com/yargs/yargs-parser/commit/9c60265fd7a03cb98e6df3e32c8c5e7508d9f56f)) - -## [18.0.0](https://www.github.com/yargs/yargs-parser/compare/v17.1.0...v18.0.0) (2020-03-02) - - -### ⚠ BREAKING CHANGES - -* the narg count is now enforced when parsing arrays. - -### Features - -* NaN can now be provided as a value for nargs, indicating "at least" one value is expected for array ([#251](https://www.github.com/yargs/yargs-parser/issues/251)) ([9db4be8](https://www.github.com/yargs/yargs-parser/commit/9db4be81417a2c7097128db34d86fe70ef4af70c)) - -## [17.1.0](https://www.github.com/yargs/yargs-parser/compare/v17.0.1...v17.1.0) (2020-03-01) - - -### Features - -* introduce greedy-arrays config, for specifying whether arrays consume multiple positionals ([#249](https://www.github.com/yargs/yargs-parser/issues/249)) ([60e880a](https://www.github.com/yargs/yargs-parser/commit/60e880a837046314d89fa4725f923837fd33a9eb)) - -### [17.0.1](https://www.github.com/yargs/yargs-parser/compare/v17.0.0...v17.0.1) (2020-02-29) - - -### Bug Fixes - -* normalized keys were not enumerable ([#247](https://www.github.com/yargs/yargs-parser/issues/247)) ([57119f9](https://www.github.com/yargs/yargs-parser/commit/57119f9f17cf27499bd95e61c2f72d18314f11ba)) - -## [17.0.0](https://www.github.com/yargs/yargs-parser/compare/v16.1.0...v17.0.0) (2020-02-10) - - -### ⚠ BREAKING CHANGES - -* this reverts parsing behavior of booleans to that of yargs@14 -* objects used during parsing are now created with a null -prototype. There may be some scenarios where this change in behavior -leaks externally. 
- -### Features - -* boolean arguments will not be collected into an implicit array ([#236](https://www.github.com/yargs/yargs-parser/issues/236)) ([34c4e19](https://www.github.com/yargs/yargs-parser/commit/34c4e19bae4e7af63e3cb6fa654a97ed476e5eb5)) -* introduce nargs-eats-options config option ([#246](https://www.github.com/yargs/yargs-parser/issues/246)) ([d50822a](https://www.github.com/yargs/yargs-parser/commit/d50822ac10e1b05f2e9643671ca131ac251b6732)) - - -### Bug Fixes - -* address bugs with "uknown-options-as-args" ([bc023e3](https://www.github.com/yargs/yargs-parser/commit/bc023e3b13e20a118353f9507d1c999bf388a346)) -* array should take precedence over nargs, but enforce nargs ([#243](https://www.github.com/yargs/yargs-parser/issues/243)) ([4cbc188](https://www.github.com/yargs/yargs-parser/commit/4cbc188b7abb2249529a19c090338debdad2fe6c)) -* support keys that collide with object prototypes ([#234](https://www.github.com/yargs/yargs-parser/issues/234)) ([1587b6d](https://www.github.com/yargs/yargs-parser/commit/1587b6d91db853a9109f1be6b209077993fee4de)) -* unknown options terminated with digits now handled by unknown-options-as-args ([#238](https://www.github.com/yargs/yargs-parser/issues/238)) ([d36cdfa](https://www.github.com/yargs/yargs-parser/commit/d36cdfa854254d7c7e0fe1d583818332ac46c2a5)) - -## [16.1.0](https://www.github.com/yargs/yargs-parser/compare/v16.0.0...v16.1.0) (2019-11-01) - - -### ⚠ BREAKING CHANGES - -* populate error if incompatible narg/count or array/count options are used (#191) - -### Features - -* options that have had their default value used are now tracked ([#211](https://www.github.com/yargs/yargs-parser/issues/211)) ([a525234](https://www.github.com/yargs/yargs-parser/commit/a525234558c847deedd73f8792e0a3b77b26e2c0)) -* populate error if incompatible narg/count or array/count options are used ([#191](https://www.github.com/yargs/yargs-parser/issues/191)) ([84a401f](https://www.github.com/yargs/yargs-parser/commit/84a401f0fa3095e0a19661670d1570d0c3b9d3c9)) - - -### Reverts - -* revert 16.0.0 CHANGELOG entry ([920320a](https://www.github.com/yargs/yargs-parser/commit/920320ad9861bbfd58eda39221ae211540fc1daf)) diff --git a/node_modules/yargs-parser/LICENSE.txt b/node_modules/yargs-parser/LICENSE.txt deleted file mode 100644 index 836440b..0000000 --- a/node_modules/yargs-parser/LICENSE.txt +++ /dev/null @@ -1,14 +0,0 @@ -Copyright (c) 2016, Contributors - -Permission to use, copy, modify, and/or distribute this software -for any purpose with or without fee is hereby granted, provided -that the above copyright notice and this permission notice -appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE -LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES -OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, -ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/yargs-parser/README.md b/node_modules/yargs-parser/README.md deleted file mode 100644 index 2614840..0000000 --- a/node_modules/yargs-parser/README.md +++ /dev/null @@ -1,518 +0,0 @@ -# yargs-parser - -![ci](https://github.com/yargs/yargs-parser/workflows/ci/badge.svg) -[![NPM version](https://img.shields.io/npm/v/yargs-parser.svg)](https://www.npmjs.com/package/yargs-parser) -[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org) -![nycrc config on GitHub](https://img.shields.io/nycrc/yargs/yargs-parser) - -The mighty option parser used by [yargs](https://github.com/yargs/yargs). - -visit the [yargs website](http://yargs.js.org/) for more examples, and thorough usage instructions. - - - -## Example - -```sh -npm i yargs-parser --save -``` - -```js -const argv = require('yargs-parser')(process.argv.slice(2)) -console.log(argv) -``` - -```console -$ node example.js --foo=33 --bar hello -{ _: [], foo: 33, bar: 'hello' } -``` - -_or parse a string!_ - -```js -const argv = require('yargs-parser')('--foo=99 --bar=33') -console.log(argv) -``` - -```console -{ _: [], foo: 99, bar: 33 } -``` - -Convert an array of mixed types before passing to `yargs-parser`: - -```js -const parse = require('yargs-parser') -parse(['-f', 11, '--zoom', 55].join(' ')) // <-- array to string -parse(['-f', 11, '--zoom', 55].map(String)) // <-- array of strings -``` - -## Deno Example - -As of `v19` `yargs-parser` supports [Deno](https://github.com/denoland/deno): - -```typescript -import parser from "https://deno.land/x/yargs_parser/deno.ts"; - -const argv = parser('--foo=99 --bar=9987930', { - string: ['bar'] -}) -console.log(argv) -``` - -## ESM Example - -As of `v19` `yargs-parser` supports ESM (_both in Node.js and in the browser_): - -**Node.js:** - -```js -import parser from 'yargs-parser' - -const argv = parser('--foo=99 --bar=9987930', { - string: ['bar'] -}) -console.log(argv) -``` - -**Browsers:** - -```html - - - - -``` - -## API - -### parser(args, opts={}) - -Parses command line arguments returning a simple mapping of keys and values. - -**expects:** - -* `args`: a string or array of strings representing the options to parse. -* `opts`: provide a set of hints indicating how `args` should be parsed: - * `opts.alias`: an object representing the set of aliases for a key: `{alias: {foo: ['f']}}`. - * `opts.array`: indicate that keys should be parsed as an array: `{array: ['foo', 'bar']}`.
    - Indicate that keys should be parsed as an array and coerced to booleans / numbers:
    - `{array: [{ key: 'foo', boolean: true }, {key: 'bar', number: true}]}`. - * `opts.boolean`: arguments should be parsed as booleans: `{boolean: ['x', 'y']}`. - * `opts.coerce`: provide a custom synchronous function that returns a coerced value from the argument provided - (or throws an error). For arrays the function is called only once for the entire array:
    - `{coerce: {foo: function (arg) {return modifiedArg}}}`. - * `opts.config`: indicate a key that represents a path to a configuration file (this file will be loaded and parsed). - * `opts.configObjects`: configuration objects to parse, their properties will be set as arguments:
    - `{configObjects: [{'x': 5, 'y': 33}, {'z': 44}]}`. - * `opts.configuration`: provide configuration options to the yargs-parser (see: [configuration](#configuration)). - * `opts.count`: indicate a key that should be used as a counter, e.g., `-vvv` = `{v: 3}`. - * `opts.default`: provide default values for keys: `{default: {x: 33, y: 'hello world!'}}`. - * `opts.envPrefix`: environment variables (`process.env`) with the prefix provided should be parsed. - * `opts.narg`: specify that a key requires `n` arguments: `{narg: {x: 2}}`. - * `opts.normalize`: `path.normalize()` will be applied to values set to this key. - * `opts.number`: keys should be treated as numbers. - * `opts.string`: keys should be treated as strings (even if they resemble a number `-x 33`). - -**returns:** - -* `obj`: an object representing the parsed value of `args` - * `key/value`: key value pairs for each argument and their aliases. - * `_`: an array representing the positional arguments. - * [optional] `--`: an array with arguments after the end-of-options flag `--`. - -### require('yargs-parser').detailed(args, opts={}) - -Parses a command line string, returning detailed information required by the -yargs engine. - -**expects:** - -* `args`: a string or array of strings representing options to parse. -* `opts`: provide a set of hints indicating how `args`, inputs are identical to `require('yargs-parser')(args, opts={})`. - -**returns:** - -* `argv`: an object representing the parsed value of `args` - * `key/value`: key value pairs for each argument and their aliases. - * `_`: an array representing the positional arguments. - * [optional] `--`: an array with arguments after the end-of-options flag `--`. -* `error`: populated with an error object if an exception occurred during parsing. -* `aliases`: the inferred list of aliases built by combining lists in `opts.alias`. -* `newAliases`: any new aliases added via camel-case expansion: - * `boolean`: `{ fooBar: true }` -* `defaulted`: any new argument created by `opts.default`, no aliases included. - * `boolean`: `{ foo: true }` -* `configuration`: given by default settings and `opts.configuration`. - - - -### Configuration - -The yargs-parser applies several automated transformations on the keys provided -in `args`. These features can be turned on and off using the `configuration` field -of `opts`. - -```js -var parsed = parser(['--no-dice'], { - configuration: { - 'boolean-negation': false - } -}) -``` - -### short option groups - -* default: `true`. -* key: `short-option-groups`. - -Should a group of short-options be treated as boolean flags? - -```console -$ node example.js -abc -{ _: [], a: true, b: true, c: true } -``` - -_if disabled:_ - -```console -$ node example.js -abc -{ _: [], abc: true } -``` - -### camel-case expansion - -* default: `true`. -* key: `camel-case-expansion`. - -Should hyphenated arguments be expanded into camel-case aliases? - -```console -$ node example.js --foo-bar -{ _: [], 'foo-bar': true, fooBar: true } -``` - -_if disabled:_ - -```console -$ node example.js --foo-bar -{ _: [], 'foo-bar': true } -``` - -### dot-notation - -* default: `true` -* key: `dot-notation` - -Should keys that contain `.` be treated as objects? - -```console -$ node example.js --foo.bar -{ _: [], foo: { bar: true } } -``` - -_if disabled:_ - -```console -$ node example.js --foo.bar -{ _: [], "foo.bar": true } -``` - -### parse numbers - -* default: `true` -* key: `parse-numbers` - -Should keys that look like numbers be treated as such? 
- -```console -$ node example.js --foo=99.3 -{ _: [], foo: 99.3 } -``` - -_if disabled:_ - -```console -$ node example.js --foo=99.3 -{ _: [], foo: "99.3" } -``` - -### parse positional numbers - -* default: `true` -* key: `parse-positional-numbers` - -Should positional keys that look like numbers be treated as such. - -```console -$ node example.js 99.3 -{ _: [99.3] } -``` - -_if disabled:_ - -```console -$ node example.js 99.3 -{ _: ['99.3'] } -``` - -### boolean negation - -* default: `true` -* key: `boolean-negation` - -Should variables prefixed with `--no` be treated as negations? - -```console -$ node example.js --no-foo -{ _: [], foo: false } -``` - -_if disabled:_ - -```console -$ node example.js --no-foo -{ _: [], "no-foo": true } -``` - -### combine arrays - -* default: `false` -* key: `combine-arrays` - -Should arrays be combined when provided by both command line arguments and -a configuration file. - -### duplicate arguments array - -* default: `true` -* key: `duplicate-arguments-array` - -Should arguments be coerced into an array when duplicated: - -```console -$ node example.js -x 1 -x 2 -{ _: [], x: [1, 2] } -``` - -_if disabled:_ - -```console -$ node example.js -x 1 -x 2 -{ _: [], x: 2 } -``` - -### flatten duplicate arrays - -* default: `true` -* key: `flatten-duplicate-arrays` - -Should array arguments be coerced into a single array when duplicated: - -```console -$ node example.js -x 1 2 -x 3 4 -{ _: [], x: [1, 2, 3, 4] } -``` - -_if disabled:_ - -```console -$ node example.js -x 1 2 -x 3 4 -{ _: [], x: [[1, 2], [3, 4]] } -``` - -### greedy arrays - -* default: `true` -* key: `greedy-arrays` - -Should arrays consume more than one positional argument following their flag. - -```console -$ node example --arr 1 2 -{ _: [], arr: [1, 2] } -``` - -_if disabled:_ - -```console -$ node example --arr 1 2 -{ _: [2], arr: [1] } -``` - -**Note: in `v18.0.0` we are considering defaulting greedy arrays to `false`.** - -### nargs eats options - -* default: `false` -* key: `nargs-eats-options` - -Should nargs consume dash options as well as positional arguments. - -### negation prefix - -* default: `no-` -* key: `negation-prefix` - -The prefix to use for negated boolean variables. - -```console -$ node example.js --no-foo -{ _: [], foo: false } -``` - -_if set to `quux`:_ - -```console -$ node example.js --quuxfoo -{ _: [], foo: false } -``` - -### populate -- - -* default: `false`. -* key: `populate--` - -Should unparsed flags be stored in `--` or `_`. - -_If disabled:_ - -```console -$ node example.js a -b -- x y -{ _: [ 'a', 'x', 'y' ], b: true } -``` - -_If enabled:_ - -```console -$ node example.js a -b -- x y -{ _: [ 'a' ], '--': [ 'x', 'y' ], b: true } -``` - -### set placeholder key - -* default: `false`. -* key: `set-placeholder-key`. - -Should a placeholder be added for keys not set via the corresponding CLI argument? - -_If disabled:_ - -```console -$ node example.js -a 1 -c 2 -{ _: [], a: 1, c: 2 } -``` - -_If enabled:_ - -```console -$ node example.js -a 1 -c 2 -{ _: [], a: 1, b: undefined, c: 2 } -``` - -### halt at non-option - -* default: `false`. -* key: `halt-at-non-option`. - -Should parsing stop at the first positional argument? This is similar to how e.g. `ssh` parses its command line. 
- -_If disabled:_ - -```console -$ node example.js -a run b -x y -{ _: [ 'b' ], a: 'run', x: 'y' } -``` - -_If enabled:_ - -```console -$ node example.js -a run b -x y -{ _: [ 'b', '-x', 'y' ], a: 'run' } -``` - -### strip aliased - -* default: `false` -* key: `strip-aliased` - -Should aliases be removed before returning results? - -_If disabled:_ - -```console -$ node example.js --test-field 1 -{ _: [], 'test-field': 1, testField: 1, 'test-alias': 1, testAlias: 1 } -``` - -_If enabled:_ - -```console -$ node example.js --test-field 1 -{ _: [], 'test-field': 1, testField: 1 } -``` - -### strip dashed - -* default: `false` -* key: `strip-dashed` - -Should dashed keys be removed before returning results? This option has no effect if -`camel-case-expansion` is disabled. - -_If disabled:_ - -```console -$ node example.js --test-field 1 -{ _: [], 'test-field': 1, testField: 1 } -``` - -_If enabled:_ - -```console -$ node example.js --test-field 1 -{ _: [], testField: 1 } -``` - -### unknown options as args - -* default: `false` -* key: `unknown-options-as-args` - -Should unknown options be treated like regular arguments? An unknown option is one that is not -configured in `opts`. - -_If disabled_ - -```console -$ node example.js --unknown-option --known-option 2 --string-option --unknown-option2 -{ _: [], unknownOption: true, knownOption: 2, stringOption: '', unknownOption2: true } -``` - -_If enabled_ - -```console -$ node example.js --unknown-option --known-option 2 --string-option --unknown-option2 -{ _: ['--unknown-option'], knownOption: 2, stringOption: '--unknown-option2' } -``` - -## Supported Node.js Versions - -Libraries in this ecosystem make a best effort to track -[Node.js' release schedule](https://nodejs.org/en/about/releases/). Here's [a -post on why we think this is important](https://medium.com/the-node-js-collection/maintainers-should-consider-following-node-js-release-schedule-ab08ed4de71a). - -## Special Thanks - -The yargs project evolves from optimist and minimist. It owes its -existence to a lot of James Halliday's hard work. Thanks [substack](https://github.com/substack) **beep** **boop** \o/ - -## License - -ISC diff --git a/node_modules/yargs-parser/browser.js b/node_modules/yargs-parser/browser.js deleted file mode 100644 index 241202c..0000000 --- a/node_modules/yargs-parser/browser.js +++ /dev/null @@ -1,29 +0,0 @@ -// Main entrypoint for ESM web browser environments. Avoids using Node.js -// specific libraries, such as "path". -// -// TODO: figure out reasonable web equivalents for "resolve", "normalize", etc. 
-import { camelCase, decamelize, looksLikeNumber } from './build/lib/string-utils.js' -import { YargsParser } from './build/lib/yargs-parser.js' -const parser = new YargsParser({ - cwd: () => { return '' }, - format: (str, arg) => { return str.replace('%s', arg) }, - normalize: (str) => { return str }, - resolve: (str) => { return str }, - require: () => { - throw Error('loading config from files not currently supported in browser') - }, - env: () => {} -}) - -const yargsParser = function Parser (args, opts) { - const result = parser.parse(args.slice(), opts) - return result.argv -} -yargsParser.detailed = function (args, opts) { - return parser.parse(args.slice(), opts) -} -yargsParser.camelCase = camelCase -yargsParser.decamelize = decamelize -yargsParser.looksLikeNumber = looksLikeNumber - -export default yargsParser diff --git a/node_modules/yargs-parser/build/index.cjs b/node_modules/yargs-parser/build/index.cjs deleted file mode 100644 index cf6f50f..0000000 --- a/node_modules/yargs-parser/build/index.cjs +++ /dev/null @@ -1,1050 +0,0 @@ -'use strict'; - -var util = require('util'); -var path = require('path'); -var fs = require('fs'); - -function camelCase(str) { - const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase(); - if (!isCamelCase) { - str = str.toLowerCase(); - } - if (str.indexOf('-') === -1 && str.indexOf('_') === -1) { - return str; - } - else { - let camelcase = ''; - let nextChrUpper = false; - const leadingHyphens = str.match(/^-+/); - for (let i = leadingHyphens ? leadingHyphens[0].length : 0; i < str.length; i++) { - let chr = str.charAt(i); - if (nextChrUpper) { - nextChrUpper = false; - chr = chr.toUpperCase(); - } - if (i !== 0 && (chr === '-' || chr === '_')) { - nextChrUpper = true; - } - else if (chr !== '-' && chr !== '_') { - camelcase += chr; - } - } - return camelcase; - } -} -function decamelize(str, joinString) { - const lowercase = str.toLowerCase(); - joinString = joinString || '-'; - let notCamelcase = ''; - for (let i = 0; i < str.length; i++) { - const chrLower = lowercase.charAt(i); - const chrString = str.charAt(i); - if (chrLower !== chrString && i > 0) { - notCamelcase += `${joinString}${lowercase.charAt(i)}`; - } - else { - notCamelcase += chrString; - } - } - return notCamelcase; -} -function looksLikeNumber(x) { - if (x === null || x === undefined) - return false; - if (typeof x === 'number') - return true; - if (/^0x[0-9a-f]+$/i.test(x)) - return true; - if (/^0[^.]/.test(x)) - return false; - return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); -} - -function tokenizeArgString(argString) { - if (Array.isArray(argString)) { - return argString.map(e => typeof e !== 'string' ? 
e + '' : e); - } - argString = argString.trim(); - let i = 0; - let prevC = null; - let c = null; - let opening = null; - const args = []; - for (let ii = 0; ii < argString.length; ii++) { - prevC = c; - c = argString.charAt(ii); - if (c === ' ' && !opening) { - if (!(prevC === ' ')) { - i++; - } - continue; - } - if (c === opening) { - opening = null; - } - else if ((c === "'" || c === '"') && !opening) { - opening = c; - } - if (!args[i]) - args[i] = ''; - args[i] += c; - } - return args; -} - -var DefaultValuesForTypeKey; -(function (DefaultValuesForTypeKey) { - DefaultValuesForTypeKey["BOOLEAN"] = "boolean"; - DefaultValuesForTypeKey["STRING"] = "string"; - DefaultValuesForTypeKey["NUMBER"] = "number"; - DefaultValuesForTypeKey["ARRAY"] = "array"; -})(DefaultValuesForTypeKey || (DefaultValuesForTypeKey = {})); - -let mixin; -class YargsParser { - constructor(_mixin) { - mixin = _mixin; - } - parse(argsInput, options) { - const opts = Object.assign({ - alias: undefined, - array: undefined, - boolean: undefined, - config: undefined, - configObjects: undefined, - configuration: undefined, - coerce: undefined, - count: undefined, - default: undefined, - envPrefix: undefined, - narg: undefined, - normalize: undefined, - string: undefined, - number: undefined, - __: undefined, - key: undefined - }, options); - const args = tokenizeArgString(argsInput); - const inputIsString = typeof argsInput === 'string'; - const aliases = combineAliases(Object.assign(Object.create(null), opts.alias)); - const configuration = Object.assign({ - 'boolean-negation': true, - 'camel-case-expansion': true, - 'combine-arrays': false, - 'dot-notation': true, - 'duplicate-arguments-array': true, - 'flatten-duplicate-arrays': true, - 'greedy-arrays': true, - 'halt-at-non-option': false, - 'nargs-eats-options': false, - 'negation-prefix': 'no-', - 'parse-numbers': true, - 'parse-positional-numbers': true, - 'populate--': false, - 'set-placeholder-key': false, - 'short-option-groups': true, - 'strip-aliased': false, - 'strip-dashed': false, - 'unknown-options-as-args': false - }, opts.configuration); - const defaults = Object.assign(Object.create(null), opts.default); - const configObjects = opts.configObjects || []; - const envPrefix = opts.envPrefix; - const notFlagsOption = configuration['populate--']; - const notFlagsArgv = notFlagsOption ? '--' : '_'; - const newAliases = Object.create(null); - const defaulted = Object.create(null); - const __ = opts.__ || mixin.format; - const flags = { - aliases: Object.create(null), - arrays: Object.create(null), - bools: Object.create(null), - strings: Object.create(null), - numbers: Object.create(null), - counts: Object.create(null), - normalize: Object.create(null), - configs: Object.create(null), - nargs: Object.create(null), - coercions: Object.create(null), - keys: [] - }; - const negative = /^-([0-9]+(\.[0-9]+)?|\.[0-9]+)$/; - const negatedBoolean = new RegExp('^--' + configuration['negation-prefix'] + '(.+)'); - [].concat(opts.array || []).filter(Boolean).forEach(function (opt) { - const key = typeof opt === 'object' ? 
opt.key : opt; - const assignment = Object.keys(opt).map(function (key) { - const arrayFlagKeys = { - boolean: 'bools', - string: 'strings', - number: 'numbers' - }; - return arrayFlagKeys[key]; - }).filter(Boolean).pop(); - if (assignment) { - flags[assignment][key] = true; - } - flags.arrays[key] = true; - flags.keys.push(key); - }); - [].concat(opts.boolean || []).filter(Boolean).forEach(function (key) { - flags.bools[key] = true; - flags.keys.push(key); - }); - [].concat(opts.string || []).filter(Boolean).forEach(function (key) { - flags.strings[key] = true; - flags.keys.push(key); - }); - [].concat(opts.number || []).filter(Boolean).forEach(function (key) { - flags.numbers[key] = true; - flags.keys.push(key); - }); - [].concat(opts.count || []).filter(Boolean).forEach(function (key) { - flags.counts[key] = true; - flags.keys.push(key); - }); - [].concat(opts.normalize || []).filter(Boolean).forEach(function (key) { - flags.normalize[key] = true; - flags.keys.push(key); - }); - if (typeof opts.narg === 'object') { - Object.entries(opts.narg).forEach(([key, value]) => { - if (typeof value === 'number') { - flags.nargs[key] = value; - flags.keys.push(key); - } - }); - } - if (typeof opts.coerce === 'object') { - Object.entries(opts.coerce).forEach(([key, value]) => { - if (typeof value === 'function') { - flags.coercions[key] = value; - flags.keys.push(key); - } - }); - } - if (typeof opts.config !== 'undefined') { - if (Array.isArray(opts.config) || typeof opts.config === 'string') { - [].concat(opts.config).filter(Boolean).forEach(function (key) { - flags.configs[key] = true; - }); - } - else if (typeof opts.config === 'object') { - Object.entries(opts.config).forEach(([key, value]) => { - if (typeof value === 'boolean' || typeof value === 'function') { - flags.configs[key] = value; - } - }); - } - } - extendAliases(opts.key, aliases, opts.default, flags.arrays); - Object.keys(defaults).forEach(function (key) { - (flags.aliases[key] || []).forEach(function (alias) { - defaults[alias] = defaults[key]; - }); - }); - let error = null; - checkConfiguration(); - let notFlags = []; - const argv = Object.assign(Object.create(null), { _: [] }); - const argvReturn = {}; - for (let i = 0; i < args.length; i++) { - const arg = args[i]; - const truncatedArg = arg.replace(/^-{3,}/, '---'); - let broken; - let key; - let letters; - let m; - let next; - let value; - if (arg !== '--' && /^-/.test(arg) && isUnknownOptionAsArg(arg)) { - pushPositional(arg); - } - else if (truncatedArg.match(/^---+(=|$)/)) { - pushPositional(arg); - continue; - } - else if (arg.match(/^--.+=/) || (!configuration['short-option-groups'] && arg.match(/^-.+=/))) { - m = arg.match(/^--?([^=]+)=([\s\S]*)$/); - if (m !== null && Array.isArray(m) && m.length >= 3) { - if (checkAllAliases(m[1], flags.arrays)) { - i = eatArray(i, m[1], args, m[2]); - } - else if (checkAllAliases(m[1], flags.nargs) !== false) { - i = eatNargs(i, m[1], args, m[2]); - } - else { - setArg(m[1], m[2], true); - } - } - } - else if (arg.match(negatedBoolean) && configuration['boolean-negation']) { - m = arg.match(negatedBoolean); - if (m !== null && Array.isArray(m) && m.length >= 2) { - key = m[1]; - setArg(key, checkAllAliases(key, flags.arrays) ? 
[false] : false); - } - } - else if (arg.match(/^--.+/) || (!configuration['short-option-groups'] && arg.match(/^-[^-]+/))) { - m = arg.match(/^--?(.+)/); - if (m !== null && Array.isArray(m) && m.length >= 2) { - key = m[1]; - if (checkAllAliases(key, flags.arrays)) { - i = eatArray(i, key, args); - } - else if (checkAllAliases(key, flags.nargs) !== false) { - i = eatNargs(i, key, args); - } - else { - next = args[i + 1]; - if (next !== undefined && (!next.match(/^-/) || - next.match(negative)) && - !checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts)) { - setArg(key, next); - i++; - } - else if (/^(true|false)$/.test(next)) { - setArg(key, next); - i++; - } - else { - setArg(key, defaultValue(key)); - } - } - } - } - else if (arg.match(/^-.\..+=/)) { - m = arg.match(/^-([^=]+)=([\s\S]*)$/); - if (m !== null && Array.isArray(m) && m.length >= 3) { - setArg(m[1], m[2]); - } - } - else if (arg.match(/^-.\..+/) && !arg.match(negative)) { - next = args[i + 1]; - m = arg.match(/^-(.\..+)/); - if (m !== null && Array.isArray(m) && m.length >= 2) { - key = m[1]; - if (next !== undefined && !next.match(/^-/) && - !checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts)) { - setArg(key, next); - i++; - } - else { - setArg(key, defaultValue(key)); - } - } - } - else if (arg.match(/^-[^-]+/) && !arg.match(negative)) { - letters = arg.slice(1, -1).split(''); - broken = false; - for (let j = 0; j < letters.length; j++) { - next = arg.slice(j + 2); - if (letters[j + 1] && letters[j + 1] === '=') { - value = arg.slice(j + 3); - key = letters[j]; - if (checkAllAliases(key, flags.arrays)) { - i = eatArray(i, key, args, value); - } - else if (checkAllAliases(key, flags.nargs) !== false) { - i = eatNargs(i, key, args, value); - } - else { - setArg(key, value); - } - broken = true; - break; - } - if (next === '-') { - setArg(letters[j], next); - continue; - } - if (/[A-Za-z]/.test(letters[j]) && - /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next) && - checkAllAliases(next, flags.bools) === false) { - setArg(letters[j], next); - broken = true; - break; - } - if (letters[j + 1] && letters[j + 1].match(/\W/)) { - setArg(letters[j], next); - broken = true; - break; - } - else { - setArg(letters[j], defaultValue(letters[j])); - } - } - key = arg.slice(-1)[0]; - if (!broken && key !== '-') { - if (checkAllAliases(key, flags.arrays)) { - i = eatArray(i, key, args); - } - else if (checkAllAliases(key, flags.nargs) !== false) { - i = eatNargs(i, key, args); - } - else { - next = args[i + 1]; - if (next !== undefined && (!/^(-|--)[^-]/.test(next) || - next.match(negative)) && - !checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts)) { - setArg(key, next); - i++; - } - else if (/^(true|false)$/.test(next)) { - setArg(key, next); - i++; - } - else { - setArg(key, defaultValue(key)); - } - } - } - } - else if (arg.match(/^-[0-9]$/) && - arg.match(negative) && - checkAllAliases(arg.slice(1), flags.bools)) { - key = arg.slice(1); - setArg(key, defaultValue(key)); - } - else if (arg === '--') { - notFlags = args.slice(i + 1); - break; - } - else if (configuration['halt-at-non-option']) { - notFlags = args.slice(i); - break; - } - else { - pushPositional(arg); - } - } - applyEnvVars(argv, true); - applyEnvVars(argv, false); - setConfig(argv); - setConfigObjects(); - applyDefaultsAndAliases(argv, flags.aliases, defaults, true); - applyCoercions(argv); - if (configuration['set-placeholder-key']) - setPlaceholderKeys(argv); - Object.keys(flags.counts).forEach(function (key) { - 
if (!hasKey(argv, key.split('.'))) - setArg(key, 0); - }); - if (notFlagsOption && notFlags.length) - argv[notFlagsArgv] = []; - notFlags.forEach(function (key) { - argv[notFlagsArgv].push(key); - }); - if (configuration['camel-case-expansion'] && configuration['strip-dashed']) { - Object.keys(argv).filter(key => key !== '--' && key.includes('-')).forEach(key => { - delete argv[key]; - }); - } - if (configuration['strip-aliased']) { - [].concat(...Object.keys(aliases).map(k => aliases[k])).forEach(alias => { - if (configuration['camel-case-expansion'] && alias.includes('-')) { - delete argv[alias.split('.').map(prop => camelCase(prop)).join('.')]; - } - delete argv[alias]; - }); - } - function pushPositional(arg) { - const maybeCoercedNumber = maybeCoerceNumber('_', arg); - if (typeof maybeCoercedNumber === 'string' || typeof maybeCoercedNumber === 'number') { - argv._.push(maybeCoercedNumber); - } - } - function eatNargs(i, key, args, argAfterEqualSign) { - let ii; - let toEat = checkAllAliases(key, flags.nargs); - toEat = typeof toEat !== 'number' || isNaN(toEat) ? 1 : toEat; - if (toEat === 0) { - if (!isUndefined(argAfterEqualSign)) { - error = Error(__('Argument unexpected for: %s', key)); - } - setArg(key, defaultValue(key)); - return i; - } - let available = isUndefined(argAfterEqualSign) ? 0 : 1; - if (configuration['nargs-eats-options']) { - if (args.length - (i + 1) + available < toEat) { - error = Error(__('Not enough arguments following: %s', key)); - } - available = toEat; - } - else { - for (ii = i + 1; ii < args.length; ii++) { - if (!args[ii].match(/^-[^0-9]/) || args[ii].match(negative) || isUnknownOptionAsArg(args[ii])) - available++; - else - break; - } - if (available < toEat) - error = Error(__('Not enough arguments following: %s', key)); - } - let consumed = Math.min(available, toEat); - if (!isUndefined(argAfterEqualSign) && consumed > 0) { - setArg(key, argAfterEqualSign); - consumed--; - } - for (ii = i + 1; ii < (consumed + i + 1); ii++) { - setArg(key, args[ii]); - } - return (i + consumed); - } - function eatArray(i, key, args, argAfterEqualSign) { - let argsToSet = []; - let next = argAfterEqualSign || args[i + 1]; - const nargsCount = checkAllAliases(key, flags.nargs); - if (checkAllAliases(key, flags.bools) && !(/^(true|false)$/.test(next))) { - argsToSet.push(true); - } - else if (isUndefined(next) || - (isUndefined(argAfterEqualSign) && /^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next))) { - if (defaults[key] !== undefined) { - const defVal = defaults[key]; - argsToSet = Array.isArray(defVal) ? 
defVal : [defVal]; - } - } - else { - if (!isUndefined(argAfterEqualSign)) { - argsToSet.push(processValue(key, argAfterEqualSign, true)); - } - for (let ii = i + 1; ii < args.length; ii++) { - if ((!configuration['greedy-arrays'] && argsToSet.length > 0) || - (nargsCount && typeof nargsCount === 'number' && argsToSet.length >= nargsCount)) - break; - next = args[ii]; - if (/^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next)) - break; - i = ii; - argsToSet.push(processValue(key, next, inputIsString)); - } - } - if (typeof nargsCount === 'number' && ((nargsCount && argsToSet.length < nargsCount) || - (isNaN(nargsCount) && argsToSet.length === 0))) { - error = Error(__('Not enough arguments following: %s', key)); - } - setArg(key, argsToSet); - return i; - } - function setArg(key, val, shouldStripQuotes = inputIsString) { - if (/-/.test(key) && configuration['camel-case-expansion']) { - const alias = key.split('.').map(function (prop) { - return camelCase(prop); - }).join('.'); - addNewAlias(key, alias); - } - const value = processValue(key, val, shouldStripQuotes); - const splitKey = key.split('.'); - setKey(argv, splitKey, value); - if (flags.aliases[key]) { - flags.aliases[key].forEach(function (x) { - const keyProperties = x.split('.'); - setKey(argv, keyProperties, value); - }); - } - if (splitKey.length > 1 && configuration['dot-notation']) { - (flags.aliases[splitKey[0]] || []).forEach(function (x) { - let keyProperties = x.split('.'); - const a = [].concat(splitKey); - a.shift(); - keyProperties = keyProperties.concat(a); - if (!(flags.aliases[key] || []).includes(keyProperties.join('.'))) { - setKey(argv, keyProperties, value); - } - }); - } - if (checkAllAliases(key, flags.normalize) && !checkAllAliases(key, flags.arrays)) { - const keys = [key].concat(flags.aliases[key] || []); - keys.forEach(function (key) { - Object.defineProperty(argvReturn, key, { - enumerable: true, - get() { - return val; - }, - set(value) { - val = typeof value === 'string' ? mixin.normalize(value) : value; - } - }); - }); - } - } - function addNewAlias(key, alias) { - if (!(flags.aliases[key] && flags.aliases[key].length)) { - flags.aliases[key] = [alias]; - newAliases[alias] = true; - } - if (!(flags.aliases[alias] && flags.aliases[alias].length)) { - addNewAlias(alias, key); - } - } - function processValue(key, val, shouldStripQuotes) { - if (shouldStripQuotes) { - val = stripQuotes(val); - } - if (checkAllAliases(key, flags.bools) || checkAllAliases(key, flags.counts)) { - if (typeof val === 'string') - val = val === 'true'; - } - let value = Array.isArray(val) - ? 
val.map(function (v) { return maybeCoerceNumber(key, v); }) - : maybeCoerceNumber(key, val); - if (checkAllAliases(key, flags.counts) && (isUndefined(value) || typeof value === 'boolean')) { - value = increment(); - } - if (checkAllAliases(key, flags.normalize) && checkAllAliases(key, flags.arrays)) { - if (Array.isArray(val)) - value = val.map((val) => { return mixin.normalize(val); }); - else - value = mixin.normalize(val); - } - return value; - } - function maybeCoerceNumber(key, value) { - if (!configuration['parse-positional-numbers'] && key === '_') - return value; - if (!checkAllAliases(key, flags.strings) && !checkAllAliases(key, flags.bools) && !Array.isArray(value)) { - const shouldCoerceNumber = looksLikeNumber(value) && configuration['parse-numbers'] && (Number.isSafeInteger(Math.floor(parseFloat(`${value}`)))); - if (shouldCoerceNumber || (!isUndefined(value) && checkAllAliases(key, flags.numbers))) { - value = Number(value); - } - } - return value; - } - function setConfig(argv) { - const configLookup = Object.create(null); - applyDefaultsAndAliases(configLookup, flags.aliases, defaults); - Object.keys(flags.configs).forEach(function (configKey) { - const configPath = argv[configKey] || configLookup[configKey]; - if (configPath) { - try { - let config = null; - const resolvedConfigPath = mixin.resolve(mixin.cwd(), configPath); - const resolveConfig = flags.configs[configKey]; - if (typeof resolveConfig === 'function') { - try { - config = resolveConfig(resolvedConfigPath); - } - catch (e) { - config = e; - } - if (config instanceof Error) { - error = config; - return; - } - } - else { - config = mixin.require(resolvedConfigPath); - } - setConfigObject(config); - } - catch (ex) { - if (ex.name === 'PermissionDenied') - error = ex; - else if (argv[configKey]) - error = Error(__('Invalid JSON config file: %s', configPath)); - } - } - }); - } - function setConfigObject(config, prev) { - Object.keys(config).forEach(function (key) { - const value = config[key]; - const fullKey = prev ? prev + '.' + key : key; - if (typeof value === 'object' && value !== null && !Array.isArray(value) && configuration['dot-notation']) { - setConfigObject(value, fullKey); - } - else { - if (!hasKey(argv, fullKey.split('.')) || (checkAllAliases(fullKey, flags.arrays) && configuration['combine-arrays'])) { - setArg(fullKey, value); - } - } - }); - } - function setConfigObjects() { - if (typeof configObjects !== 'undefined') { - configObjects.forEach(function (configObject) { - setConfigObject(configObject); - }); - } - } - function applyEnvVars(argv, configOnly) { - if (typeof envPrefix === 'undefined') - return; - const prefix = typeof envPrefix === 'string' ? 
envPrefix : ''; - const env = mixin.env(); - Object.keys(env).forEach(function (envVar) { - if (prefix === '' || envVar.lastIndexOf(prefix, 0) === 0) { - const keys = envVar.split('__').map(function (key, i) { - if (i === 0) { - key = key.substring(prefix.length); - } - return camelCase(key); - }); - if (((configOnly && flags.configs[keys.join('.')]) || !configOnly) && !hasKey(argv, keys)) { - setArg(keys.join('.'), env[envVar]); - } - } - }); - } - function applyCoercions(argv) { - let coerce; - const applied = new Set(); - Object.keys(argv).forEach(function (key) { - if (!applied.has(key)) { - coerce = checkAllAliases(key, flags.coercions); - if (typeof coerce === 'function') { - try { - const value = maybeCoerceNumber(key, coerce(argv[key])); - ([].concat(flags.aliases[key] || [], key)).forEach(ali => { - applied.add(ali); - argv[ali] = value; - }); - } - catch (err) { - error = err; - } - } - } - }); - } - function setPlaceholderKeys(argv) { - flags.keys.forEach((key) => { - if (~key.indexOf('.')) - return; - if (typeof argv[key] === 'undefined') - argv[key] = undefined; - }); - return argv; - } - function applyDefaultsAndAliases(obj, aliases, defaults, canLog = false) { - Object.keys(defaults).forEach(function (key) { - if (!hasKey(obj, key.split('.'))) { - setKey(obj, key.split('.'), defaults[key]); - if (canLog) - defaulted[key] = true; - (aliases[key] || []).forEach(function (x) { - if (hasKey(obj, x.split('.'))) - return; - setKey(obj, x.split('.'), defaults[key]); - }); - } - }); - } - function hasKey(obj, keys) { - let o = obj; - if (!configuration['dot-notation']) - keys = [keys.join('.')]; - keys.slice(0, -1).forEach(function (key) { - o = (o[key] || {}); - }); - const key = keys[keys.length - 1]; - if (typeof o !== 'object') - return false; - else - return key in o; - } - function setKey(obj, keys, value) { - let o = obj; - if (!configuration['dot-notation']) - keys = [keys.join('.')]; - keys.slice(0, -1).forEach(function (key) { - key = sanitizeKey(key); - if (typeof o === 'object' && o[key] === undefined) { - o[key] = {}; - } - if (typeof o[key] !== 'object' || Array.isArray(o[key])) { - if (Array.isArray(o[key])) { - o[key].push({}); - } - else { - o[key] = [o[key], {}]; - } - o = o[key][o[key].length - 1]; - } - else { - o = o[key]; - } - }); - const key = sanitizeKey(keys[keys.length - 1]); - const isTypeArray = checkAllAliases(keys.join('.'), flags.arrays); - const isValueArray = Array.isArray(value); - let duplicate = configuration['duplicate-arguments-array']; - if (!duplicate && checkAllAliases(key, flags.nargs)) { - duplicate = true; - if ((!isUndefined(o[key]) && flags.nargs[key] === 1) || (Array.isArray(o[key]) && o[key].length === flags.nargs[key])) { - o[key] = undefined; - } - } - if (value === increment()) { - o[key] = increment(o[key]); - } - else if (Array.isArray(o[key])) { - if (duplicate && isTypeArray && isValueArray) { - o[key] = configuration['flatten-duplicate-arrays'] ? o[key].concat(value) : (Array.isArray(o[key][0]) ? o[key] : [o[key]]).concat([value]); - } - else if (!duplicate && Boolean(isTypeArray) === Boolean(isValueArray)) { - o[key] = value; - } - else { - o[key] = o[key].concat([value]); - } - } - else if (o[key] === undefined && isTypeArray) { - o[key] = isValueArray ? 
value : [value]; - } - else if (duplicate && !(o[key] === undefined || - checkAllAliases(key, flags.counts) || - checkAllAliases(key, flags.bools))) { - o[key] = [o[key], value]; - } - else { - o[key] = value; - } - } - function extendAliases(...args) { - args.forEach(function (obj) { - Object.keys(obj || {}).forEach(function (key) { - if (flags.aliases[key]) - return; - flags.aliases[key] = [].concat(aliases[key] || []); - flags.aliases[key].concat(key).forEach(function (x) { - if (/-/.test(x) && configuration['camel-case-expansion']) { - const c = camelCase(x); - if (c !== key && flags.aliases[key].indexOf(c) === -1) { - flags.aliases[key].push(c); - newAliases[c] = true; - } - } - }); - flags.aliases[key].concat(key).forEach(function (x) { - if (x.length > 1 && /[A-Z]/.test(x) && configuration['camel-case-expansion']) { - const c = decamelize(x, '-'); - if (c !== key && flags.aliases[key].indexOf(c) === -1) { - flags.aliases[key].push(c); - newAliases[c] = true; - } - } - }); - flags.aliases[key].forEach(function (x) { - flags.aliases[x] = [key].concat(flags.aliases[key].filter(function (y) { - return x !== y; - })); - }); - }); - }); - } - function checkAllAliases(key, flag) { - const toCheck = [].concat(flags.aliases[key] || [], key); - const keys = Object.keys(flag); - const setAlias = toCheck.find(key => keys.includes(key)); - return setAlias ? flag[setAlias] : false; - } - function hasAnyFlag(key) { - const flagsKeys = Object.keys(flags); - const toCheck = [].concat(flagsKeys.map(k => flags[k])); - return toCheck.some(function (flag) { - return Array.isArray(flag) ? flag.includes(key) : flag[key]; - }); - } - function hasFlagsMatching(arg, ...patterns) { - const toCheck = [].concat(...patterns); - return toCheck.some(function (pattern) { - const match = arg.match(pattern); - return match && hasAnyFlag(match[1]); - }); - } - function hasAllShortFlags(arg) { - if (arg.match(negative) || !arg.match(/^-[^-]+/)) { - return false; - } - let hasAllFlags = true; - let next; - const letters = arg.slice(1).split(''); - for (let j = 0; j < letters.length; j++) { - next = arg.slice(j + 2); - if (!hasAnyFlag(letters[j])) { - hasAllFlags = false; - break; - } - if ((letters[j + 1] && letters[j + 1] === '=') || - next === '-' || - (/[A-Za-z]/.test(letters[j]) && /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) || - (letters[j + 1] && letters[j + 1].match(/\W/))) { - break; - } - } - return hasAllFlags; - } - function isUnknownOptionAsArg(arg) { - return configuration['unknown-options-as-args'] && isUnknownOption(arg); - } - function isUnknownOption(arg) { - arg = arg.replace(/^-{3,}/, '--'); - if (arg.match(negative)) { - return false; - } - if (hasAllShortFlags(arg)) { - return false; - } - const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/; - const normalFlag = /^-+([^=]+?)$/; - const flagEndingInHyphen = /^-+([^=]+?)-$/; - const flagEndingInDigits = /^-+([^=]+?\d+)$/; - const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/; - return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters); - } - function defaultValue(key) { - if (!checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts) && - `${key}` in defaults) { - return defaults[key]; - } - else { - return defaultForType(guessType(key)); - } - } - function defaultForType(type) { - const def = { - [DefaultValuesForTypeKey.BOOLEAN]: true, - [DefaultValuesForTypeKey.STRING]: '', - [DefaultValuesForTypeKey.NUMBER]: undefined, - 
[DefaultValuesForTypeKey.ARRAY]: [] - }; - return def[type]; - } - function guessType(key) { - let type = DefaultValuesForTypeKey.BOOLEAN; - if (checkAllAliases(key, flags.strings)) - type = DefaultValuesForTypeKey.STRING; - else if (checkAllAliases(key, flags.numbers)) - type = DefaultValuesForTypeKey.NUMBER; - else if (checkAllAliases(key, flags.bools)) - type = DefaultValuesForTypeKey.BOOLEAN; - else if (checkAllAliases(key, flags.arrays)) - type = DefaultValuesForTypeKey.ARRAY; - return type; - } - function isUndefined(num) { - return num === undefined; - } - function checkConfiguration() { - Object.keys(flags.counts).find(key => { - if (checkAllAliases(key, flags.arrays)) { - error = Error(__('Invalid configuration: %s, opts.count excludes opts.array.', key)); - return true; - } - else if (checkAllAliases(key, flags.nargs)) { - error = Error(__('Invalid configuration: %s, opts.count excludes opts.narg.', key)); - return true; - } - return false; - }); - } - return { - aliases: Object.assign({}, flags.aliases), - argv: Object.assign(argvReturn, argv), - configuration: configuration, - defaulted: Object.assign({}, defaulted), - error: error, - newAliases: Object.assign({}, newAliases) - }; - } -} -function combineAliases(aliases) { - const aliasArrays = []; - const combined = Object.create(null); - let change = true; - Object.keys(aliases).forEach(function (key) { - aliasArrays.push([].concat(aliases[key], key)); - }); - while (change) { - change = false; - for (let i = 0; i < aliasArrays.length; i++) { - for (let ii = i + 1; ii < aliasArrays.length; ii++) { - const intersect = aliasArrays[i].filter(function (v) { - return aliasArrays[ii].indexOf(v) !== -1; - }); - if (intersect.length) { - aliasArrays[i] = aliasArrays[i].concat(aliasArrays[ii]); - aliasArrays.splice(ii, 1); - change = true; - break; - } - } - } - } - aliasArrays.forEach(function (aliasArray) { - aliasArray = aliasArray.filter(function (v, i, self) { - return self.indexOf(v) === i; - }); - const lastAlias = aliasArray.pop(); - if (lastAlias !== undefined && typeof lastAlias === 'string') { - combined[lastAlias] = aliasArray; - } - }); - return combined; -} -function increment(orig) { - return orig !== undefined ? orig + 1 : 1; -} -function sanitizeKey(key) { - if (key === '__proto__') - return '___proto___'; - return key; -} -function stripQuotes(val) { - return (typeof val === 'string' && - (val[0] === "'" || val[0] === '"') && - val[val.length - 1] === val[0]) - ? val.substring(1, val.length - 1) - : val; -} - -var _a, _b, _c; -const minNodeVersion = (process && process.env && process.env.YARGS_MIN_NODE_VERSION) - ? Number(process.env.YARGS_MIN_NODE_VERSION) - : 12; -const nodeVersion = (_b = (_a = process === null || process === void 0 ? void 0 : process.versions) === null || _a === void 0 ? void 0 : _a.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1); -if (nodeVersion) { - const major = Number(nodeVersion.match(/^([^.]+)/)[1]); - if (major < minNodeVersion) { - throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`); - } -} -const env = process ? 
process.env : {}; -const parser = new YargsParser({ - cwd: process.cwd, - env: () => { - return env; - }, - format: util.format, - normalize: path.normalize, - resolve: path.resolve, - require: (path) => { - if (typeof require !== 'undefined') { - return require(path); - } - else if (path.match(/\.json$/)) { - return JSON.parse(fs.readFileSync(path, 'utf8')); - } - else { - throw Error('only .json config files are supported in ESM'); - } - } -}); -const yargsParser = function Parser(args, opts) { - const result = parser.parse(args.slice(), opts); - return result.argv; -}; -yargsParser.detailed = function (args, opts) { - return parser.parse(args.slice(), opts); -}; -yargsParser.camelCase = camelCase; -yargsParser.decamelize = decamelize; -yargsParser.looksLikeNumber = looksLikeNumber; - -module.exports = yargsParser; diff --git a/node_modules/yargs-parser/build/lib/index.js b/node_modules/yargs-parser/build/lib/index.js deleted file mode 100644 index 43ef485..0000000 --- a/node_modules/yargs-parser/build/lib/index.js +++ /dev/null @@ -1,62 +0,0 @@ -/** - * @fileoverview Main entrypoint for libraries using yargs-parser in Node.js - * CJS and ESM environments. - * - * @license - * Copyright (c) 2016, Contributors - * SPDX-License-Identifier: ISC - */ -var _a, _b, _c; -import { format } from 'util'; -import { normalize, resolve } from 'path'; -import { camelCase, decamelize, looksLikeNumber } from './string-utils.js'; -import { YargsParser } from './yargs-parser.js'; -import { readFileSync } from 'fs'; -// See https://github.com/yargs/yargs-parser#supported-nodejs-versions for our -// version support policy. The YARGS_MIN_NODE_VERSION is used for testing only. -const minNodeVersion = (process && process.env && process.env.YARGS_MIN_NODE_VERSION) - ? Number(process.env.YARGS_MIN_NODE_VERSION) - : 12; -const nodeVersion = (_b = (_a = process === null || process === void 0 ? void 0 : process.versions) === null || _a === void 0 ? void 0 : _a.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1); -if (nodeVersion) { - const major = Number(nodeVersion.match(/^([^.]+)/)[1]); - if (major < minNodeVersion) { - throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`); - } -} -// Creates a yargs-parser instance using Node.js standard libraries: -const env = process ? 
process.env : {}; -const parser = new YargsParser({ - cwd: process.cwd, - env: () => { - return env; - }, - format, - normalize, - resolve, - // TODO: figure out a way to combine ESM and CJS coverage, such that - // we can exercise all the lines below: - require: (path) => { - if (typeof require !== 'undefined') { - return require(path); - } - else if (path.match(/\.json$/)) { - // Addresses: https://github.com/yargs/yargs/issues/2040 - return JSON.parse(readFileSync(path, 'utf8')); - } - else { - throw Error('only .json config files are supported in ESM'); - } - } -}); -const yargsParser = function Parser(args, opts) { - const result = parser.parse(args.slice(), opts); - return result.argv; -}; -yargsParser.detailed = function (args, opts) { - return parser.parse(args.slice(), opts); -}; -yargsParser.camelCase = camelCase; -yargsParser.decamelize = decamelize; -yargsParser.looksLikeNumber = looksLikeNumber; -export default yargsParser; diff --git a/node_modules/yargs-parser/build/lib/string-utils.js b/node_modules/yargs-parser/build/lib/string-utils.js deleted file mode 100644 index 4e8bd99..0000000 --- a/node_modules/yargs-parser/build/lib/string-utils.js +++ /dev/null @@ -1,65 +0,0 @@ -/** - * @license - * Copyright (c) 2016, Contributors - * SPDX-License-Identifier: ISC - */ -export function camelCase(str) { - // Handle the case where an argument is provided as camel case, e.g., fooBar. - // by ensuring that the string isn't already mixed case: - const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase(); - if (!isCamelCase) { - str = str.toLowerCase(); - } - if (str.indexOf('-') === -1 && str.indexOf('_') === -1) { - return str; - } - else { - let camelcase = ''; - let nextChrUpper = false; - const leadingHyphens = str.match(/^-+/); - for (let i = leadingHyphens ? leadingHyphens[0].length : 0; i < str.length; i++) { - let chr = str.charAt(i); - if (nextChrUpper) { - nextChrUpper = false; - chr = chr.toUpperCase(); - } - if (i !== 0 && (chr === '-' || chr === '_')) { - nextChrUpper = true; - } - else if (chr !== '-' && chr !== '_') { - camelcase += chr; - } - } - return camelcase; - } -} -export function decamelize(str, joinString) { - const lowercase = str.toLowerCase(); - joinString = joinString || '-'; - let notCamelcase = ''; - for (let i = 0; i < str.length; i++) { - const chrLower = lowercase.charAt(i); - const chrString = str.charAt(i); - if (chrLower !== chrString && i > 0) { - notCamelcase += `${joinString}${lowercase.charAt(i)}`; - } - else { - notCamelcase += chrString; - } - } - return notCamelcase; -} -export function looksLikeNumber(x) { - if (x === null || x === undefined) - return false; - // if loaded from config, may already be a number. - if (typeof x === 'number') - return true; - // hexadecimal. - if (/^0x[0-9a-f]+$/i.test(x)) - return true; - // don't treat 0123 as a number; as it drops the leading '0'. - if (/^0[^.]/.test(x)) - return false; - return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); -} diff --git a/node_modules/yargs-parser/build/lib/tokenize-arg-string.js b/node_modules/yargs-parser/build/lib/tokenize-arg-string.js deleted file mode 100644 index 5e732ef..0000000 --- a/node_modules/yargs-parser/build/lib/tokenize-arg-string.js +++ /dev/null @@ -1,40 +0,0 @@ -/** - * @license - * Copyright (c) 2016, Contributors - * SPDX-License-Identifier: ISC - */ -// take an un-split argv string and tokenize it. 
-export function tokenizeArgString(argString) { - if (Array.isArray(argString)) { - return argString.map(e => typeof e !== 'string' ? e + '' : e); - } - argString = argString.trim(); - let i = 0; - let prevC = null; - let c = null; - let opening = null; - const args = []; - for (let ii = 0; ii < argString.length; ii++) { - prevC = c; - c = argString.charAt(ii); - // split on spaces unless we're in quotes. - if (c === ' ' && !opening) { - if (!(prevC === ' ')) { - i++; - } - continue; - } - // don't split the string if we're in matching - // opening or closing single and double quotes. - if (c === opening) { - opening = null; - } - else if ((c === "'" || c === '"') && !opening) { - opening = c; - } - if (!args[i]) - args[i] = ''; - args[i] += c; - } - return args; -} diff --git a/node_modules/yargs-parser/build/lib/yargs-parser-types.js b/node_modules/yargs-parser/build/lib/yargs-parser-types.js deleted file mode 100644 index 63b7c31..0000000 --- a/node_modules/yargs-parser/build/lib/yargs-parser-types.js +++ /dev/null @@ -1,12 +0,0 @@ -/** - * @license - * Copyright (c) 2016, Contributors - * SPDX-License-Identifier: ISC - */ -export var DefaultValuesForTypeKey; -(function (DefaultValuesForTypeKey) { - DefaultValuesForTypeKey["BOOLEAN"] = "boolean"; - DefaultValuesForTypeKey["STRING"] = "string"; - DefaultValuesForTypeKey["NUMBER"] = "number"; - DefaultValuesForTypeKey["ARRAY"] = "array"; -})(DefaultValuesForTypeKey || (DefaultValuesForTypeKey = {})); diff --git a/node_modules/yargs-parser/build/lib/yargs-parser.js b/node_modules/yargs-parser/build/lib/yargs-parser.js deleted file mode 100644 index 415d4bc..0000000 --- a/node_modules/yargs-parser/build/lib/yargs-parser.js +++ /dev/null @@ -1,1045 +0,0 @@ -/** - * @license - * Copyright (c) 2016, Contributors - * SPDX-License-Identifier: ISC - */ -import { tokenizeArgString } from './tokenize-arg-string.js'; -import { DefaultValuesForTypeKey } from './yargs-parser-types.js'; -import { camelCase, decamelize, looksLikeNumber } from './string-utils.js'; -let mixin; -export class YargsParser { - constructor(_mixin) { - mixin = _mixin; - } - parse(argsInput, options) { - const opts = Object.assign({ - alias: undefined, - array: undefined, - boolean: undefined, - config: undefined, - configObjects: undefined, - configuration: undefined, - coerce: undefined, - count: undefined, - default: undefined, - envPrefix: undefined, - narg: undefined, - normalize: undefined, - string: undefined, - number: undefined, - __: undefined, - key: undefined - }, options); - // allow a string argument to be passed in rather - // than an argv array. - const args = tokenizeArgString(argsInput); - // tokenizeArgString adds extra quotes to args if argsInput is a string - // only strip those extra quotes in processValue if argsInput is a string - const inputIsString = typeof argsInput === 'string'; - // aliases might have transitive relationships, normalize this. 
- const aliases = combineAliases(Object.assign(Object.create(null), opts.alias)); - const configuration = Object.assign({ - 'boolean-negation': true, - 'camel-case-expansion': true, - 'combine-arrays': false, - 'dot-notation': true, - 'duplicate-arguments-array': true, - 'flatten-duplicate-arrays': true, - 'greedy-arrays': true, - 'halt-at-non-option': false, - 'nargs-eats-options': false, - 'negation-prefix': 'no-', - 'parse-numbers': true, - 'parse-positional-numbers': true, - 'populate--': false, - 'set-placeholder-key': false, - 'short-option-groups': true, - 'strip-aliased': false, - 'strip-dashed': false, - 'unknown-options-as-args': false - }, opts.configuration); - const defaults = Object.assign(Object.create(null), opts.default); - const configObjects = opts.configObjects || []; - const envPrefix = opts.envPrefix; - const notFlagsOption = configuration['populate--']; - const notFlagsArgv = notFlagsOption ? '--' : '_'; - const newAliases = Object.create(null); - const defaulted = Object.create(null); - // allow a i18n handler to be passed in, default to a fake one (util.format). - const __ = opts.__ || mixin.format; - const flags = { - aliases: Object.create(null), - arrays: Object.create(null), - bools: Object.create(null), - strings: Object.create(null), - numbers: Object.create(null), - counts: Object.create(null), - normalize: Object.create(null), - configs: Object.create(null), - nargs: Object.create(null), - coercions: Object.create(null), - keys: [] - }; - const negative = /^-([0-9]+(\.[0-9]+)?|\.[0-9]+)$/; - const negatedBoolean = new RegExp('^--' + configuration['negation-prefix'] + '(.+)'); - [].concat(opts.array || []).filter(Boolean).forEach(function (opt) { - const key = typeof opt === 'object' ? opt.key : opt; - // assign to flags[bools|strings|numbers] - const assignment = Object.keys(opt).map(function (key) { - const arrayFlagKeys = { - boolean: 'bools', - string: 'strings', - number: 'numbers' - }; - return arrayFlagKeys[key]; - }).filter(Boolean).pop(); - // assign key to be coerced - if (assignment) { - flags[assignment][key] = true; - } - flags.arrays[key] = true; - flags.keys.push(key); - }); - [].concat(opts.boolean || []).filter(Boolean).forEach(function (key) { - flags.bools[key] = true; - flags.keys.push(key); - }); - [].concat(opts.string || []).filter(Boolean).forEach(function (key) { - flags.strings[key] = true; - flags.keys.push(key); - }); - [].concat(opts.number || []).filter(Boolean).forEach(function (key) { - flags.numbers[key] = true; - flags.keys.push(key); - }); - [].concat(opts.count || []).filter(Boolean).forEach(function (key) { - flags.counts[key] = true; - flags.keys.push(key); - }); - [].concat(opts.normalize || []).filter(Boolean).forEach(function (key) { - flags.normalize[key] = true; - flags.keys.push(key); - }); - if (typeof opts.narg === 'object') { - Object.entries(opts.narg).forEach(([key, value]) => { - if (typeof value === 'number') { - flags.nargs[key] = value; - flags.keys.push(key); - } - }); - } - if (typeof opts.coerce === 'object') { - Object.entries(opts.coerce).forEach(([key, value]) => { - if (typeof value === 'function') { - flags.coercions[key] = value; - flags.keys.push(key); - } - }); - } - if (typeof opts.config !== 'undefined') { - if (Array.isArray(opts.config) || typeof opts.config === 'string') { - ; - [].concat(opts.config).filter(Boolean).forEach(function (key) { - flags.configs[key] = true; - }); - } - else if (typeof opts.config === 'object') { - Object.entries(opts.config).forEach(([key, value]) => { - if 
(typeof value === 'boolean' || typeof value === 'function') { - flags.configs[key] = value; - } - }); - } - } - // create a lookup table that takes into account all - // combinations of aliases: {f: ['foo'], foo: ['f']} - extendAliases(opts.key, aliases, opts.default, flags.arrays); - // apply default values to all aliases. - Object.keys(defaults).forEach(function (key) { - (flags.aliases[key] || []).forEach(function (alias) { - defaults[alias] = defaults[key]; - }); - }); - let error = null; - checkConfiguration(); - let notFlags = []; - const argv = Object.assign(Object.create(null), { _: [] }); - // TODO(bcoe): for the first pass at removing object prototype we didn't - // remove all prototypes from objects returned by this API, we might want - // to gradually move towards doing so. - const argvReturn = {}; - for (let i = 0; i < args.length; i++) { - const arg = args[i]; - const truncatedArg = arg.replace(/^-{3,}/, '---'); - let broken; - let key; - let letters; - let m; - let next; - let value; - // any unknown option (except for end-of-options, "--") - if (arg !== '--' && /^-/.test(arg) && isUnknownOptionAsArg(arg)) { - pushPositional(arg); - // ---, ---=, ----, etc, - } - else if (truncatedArg.match(/^---+(=|$)/)) { - // options without key name are invalid. - pushPositional(arg); - continue; - // -- separated by = - } - else if (arg.match(/^--.+=/) || (!configuration['short-option-groups'] && arg.match(/^-.+=/))) { - // Using [\s\S] instead of . because js doesn't support the - // 'dotall' regex modifier. See: - // http://stackoverflow.com/a/1068308/13216 - m = arg.match(/^--?([^=]+)=([\s\S]*)$/); - // arrays format = '--f=a b c' - if (m !== null && Array.isArray(m) && m.length >= 3) { - if (checkAllAliases(m[1], flags.arrays)) { - i = eatArray(i, m[1], args, m[2]); - } - else if (checkAllAliases(m[1], flags.nargs) !== false) { - // nargs format = '--f=monkey washing cat' - i = eatNargs(i, m[1], args, m[2]); - } - else { - setArg(m[1], m[2], true); - } - } - } - else if (arg.match(negatedBoolean) && configuration['boolean-negation']) { - m = arg.match(negatedBoolean); - if (m !== null && Array.isArray(m) && m.length >= 2) { - key = m[1]; - setArg(key, checkAllAliases(key, flags.arrays) ? [false] : false); - } - // -- separated by space. - } - else if (arg.match(/^--.+/) || (!configuration['short-option-groups'] && arg.match(/^-[^-]+/))) { - m = arg.match(/^--?(.+)/); - if (m !== null && Array.isArray(m) && m.length >= 2) { - key = m[1]; - if (checkAllAliases(key, flags.arrays)) { - // array format = '--foo a b c' - i = eatArray(i, key, args); - } - else if (checkAllAliases(key, flags.nargs) !== false) { - // nargs format = '--foo a b c' - // should be truthy even if: flags.nargs[key] === 0 - i = eatNargs(i, key, args); - } - else { - next = args[i + 1]; - if (next !== undefined && (!next.match(/^-/) || - next.match(negative)) && - !checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts)) { - setArg(key, next); - i++; - } - else if (/^(true|false)$/.test(next)) { - setArg(key, next); - i++; - } - else { - setArg(key, defaultValue(key)); - } - } - } - // dot-notation flag separated by '='. - } - else if (arg.match(/^-.\..+=/)) { - m = arg.match(/^-([^=]+)=([\s\S]*)$/); - if (m !== null && Array.isArray(m) && m.length >= 3) { - setArg(m[1], m[2]); - } - // dot-notation flag separated by space. 
- } - else if (arg.match(/^-.\..+/) && !arg.match(negative)) { - next = args[i + 1]; - m = arg.match(/^-(.\..+)/); - if (m !== null && Array.isArray(m) && m.length >= 2) { - key = m[1]; - if (next !== undefined && !next.match(/^-/) && - !checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts)) { - setArg(key, next); - i++; - } - else { - setArg(key, defaultValue(key)); - } - } - } - else if (arg.match(/^-[^-]+/) && !arg.match(negative)) { - letters = arg.slice(1, -1).split(''); - broken = false; - for (let j = 0; j < letters.length; j++) { - next = arg.slice(j + 2); - if (letters[j + 1] && letters[j + 1] === '=') { - value = arg.slice(j + 3); - key = letters[j]; - if (checkAllAliases(key, flags.arrays)) { - // array format = '-f=a b c' - i = eatArray(i, key, args, value); - } - else if (checkAllAliases(key, flags.nargs) !== false) { - // nargs format = '-f=monkey washing cat' - i = eatNargs(i, key, args, value); - } - else { - setArg(key, value); - } - broken = true; - break; - } - if (next === '-') { - setArg(letters[j], next); - continue; - } - // current letter is an alphabetic character and next value is a number - if (/[A-Za-z]/.test(letters[j]) && - /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next) && - checkAllAliases(next, flags.bools) === false) { - setArg(letters[j], next); - broken = true; - break; - } - if (letters[j + 1] && letters[j + 1].match(/\W/)) { - setArg(letters[j], next); - broken = true; - break; - } - else { - setArg(letters[j], defaultValue(letters[j])); - } - } - key = arg.slice(-1)[0]; - if (!broken && key !== '-') { - if (checkAllAliases(key, flags.arrays)) { - // array format = '-f a b c' - i = eatArray(i, key, args); - } - else if (checkAllAliases(key, flags.nargs) !== false) { - // nargs format = '-f a b c' - // should be truthy even if: flags.nargs[key] === 0 - i = eatNargs(i, key, args); - } - else { - next = args[i + 1]; - if (next !== undefined && (!/^(-|--)[^-]/.test(next) || - next.match(negative)) && - !checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts)) { - setArg(key, next); - i++; - } - else if (/^(true|false)$/.test(next)) { - setArg(key, next); - i++; - } - else { - setArg(key, defaultValue(key)); - } - } - } - } - else if (arg.match(/^-[0-9]$/) && - arg.match(negative) && - checkAllAliases(arg.slice(1), flags.bools)) { - // single-digit boolean alias, e.g: xargs -0 - key = arg.slice(1); - setArg(key, defaultValue(key)); - } - else if (arg === '--') { - notFlags = args.slice(i + 1); - break; - } - else if (configuration['halt-at-non-option']) { - notFlags = args.slice(i); - break; - } - else { - pushPositional(arg); - } - } - // order of precedence: - // 1. command line arg - // 2. value from env var - // 3. value from config file - // 4. value from config objects - // 5. configured default value - applyEnvVars(argv, true); // special case: check env vars that point to config file - applyEnvVars(argv, false); - setConfig(argv); - setConfigObjects(); - applyDefaultsAndAliases(argv, flags.aliases, defaults, true); - applyCoercions(argv); - if (configuration['set-placeholder-key']) - setPlaceholderKeys(argv); - // for any counts either not in args or without an explicit default, set to 0 - Object.keys(flags.counts).forEach(function (key) { - if (!hasKey(argv, key.split('.'))) - setArg(key, 0); - }); - // '--' defaults to undefined. 
- if (notFlagsOption && notFlags.length) - argv[notFlagsArgv] = []; - notFlags.forEach(function (key) { - argv[notFlagsArgv].push(key); - }); - if (configuration['camel-case-expansion'] && configuration['strip-dashed']) { - Object.keys(argv).filter(key => key !== '--' && key.includes('-')).forEach(key => { - delete argv[key]; - }); - } - if (configuration['strip-aliased']) { - ; - [].concat(...Object.keys(aliases).map(k => aliases[k])).forEach(alias => { - if (configuration['camel-case-expansion'] && alias.includes('-')) { - delete argv[alias.split('.').map(prop => camelCase(prop)).join('.')]; - } - delete argv[alias]; - }); - } - // Push argument into positional array, applying numeric coercion: - function pushPositional(arg) { - const maybeCoercedNumber = maybeCoerceNumber('_', arg); - if (typeof maybeCoercedNumber === 'string' || typeof maybeCoercedNumber === 'number') { - argv._.push(maybeCoercedNumber); - } - } - // how many arguments should we consume, based - // on the nargs option? - function eatNargs(i, key, args, argAfterEqualSign) { - let ii; - let toEat = checkAllAliases(key, flags.nargs); - // NaN has a special meaning for the array type, indicating that one or - // more values are expected. - toEat = typeof toEat !== 'number' || isNaN(toEat) ? 1 : toEat; - if (toEat === 0) { - if (!isUndefined(argAfterEqualSign)) { - error = Error(__('Argument unexpected for: %s', key)); - } - setArg(key, defaultValue(key)); - return i; - } - let available = isUndefined(argAfterEqualSign) ? 0 : 1; - if (configuration['nargs-eats-options']) { - // classic behavior, yargs eats positional and dash arguments. - if (args.length - (i + 1) + available < toEat) { - error = Error(__('Not enough arguments following: %s', key)); - } - available = toEat; - } - else { - // nargs will not consume flag arguments, e.g., -abc, --foo, - // and terminates when one is observed. - for (ii = i + 1; ii < args.length; ii++) { - if (!args[ii].match(/^-[^0-9]/) || args[ii].match(negative) || isUnknownOptionAsArg(args[ii])) - available++; - else - break; - } - if (available < toEat) - error = Error(__('Not enough arguments following: %s', key)); - } - let consumed = Math.min(available, toEat); - if (!isUndefined(argAfterEqualSign) && consumed > 0) { - setArg(key, argAfterEqualSign); - consumed--; - } - for (ii = i + 1; ii < (consumed + i + 1); ii++) { - setArg(key, args[ii]); - } - return (i + consumed); - } - // if an option is an array, eat all non-hyphenated arguments - // following it... YUM! - // e.g., --foo apple banana cat becomes ["apple", "banana", "cat"] - function eatArray(i, key, args, argAfterEqualSign) { - let argsToSet = []; - let next = argAfterEqualSign || args[i + 1]; - // If both array and nargs are configured, enforce the nargs count: - const nargsCount = checkAllAliases(key, flags.nargs); - if (checkAllAliases(key, flags.bools) && !(/^(true|false)$/.test(next))) { - argsToSet.push(true); - } - else if (isUndefined(next) || - (isUndefined(argAfterEqualSign) && /^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next))) { - // for keys without value ==> argsToSet remains an empty [] - // set user default value, if available - if (defaults[key] !== undefined) { - const defVal = defaults[key]; - argsToSet = Array.isArray(defVal) ? 
defVal : [defVal]; - } - } - else { - // value in --option=value is eaten as is - if (!isUndefined(argAfterEqualSign)) { - argsToSet.push(processValue(key, argAfterEqualSign, true)); - } - for (let ii = i + 1; ii < args.length; ii++) { - if ((!configuration['greedy-arrays'] && argsToSet.length > 0) || - (nargsCount && typeof nargsCount === 'number' && argsToSet.length >= nargsCount)) - break; - next = args[ii]; - if (/^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next)) - break; - i = ii; - argsToSet.push(processValue(key, next, inputIsString)); - } - } - // If both array and nargs are configured, create an error if less than - // nargs positionals were found. NaN has special meaning, indicating - // that at least one value is required (more are okay). - if (typeof nargsCount === 'number' && ((nargsCount && argsToSet.length < nargsCount) || - (isNaN(nargsCount) && argsToSet.length === 0))) { - error = Error(__('Not enough arguments following: %s', key)); - } - setArg(key, argsToSet); - return i; - } - function setArg(key, val, shouldStripQuotes = inputIsString) { - if (/-/.test(key) && configuration['camel-case-expansion']) { - const alias = key.split('.').map(function (prop) { - return camelCase(prop); - }).join('.'); - addNewAlias(key, alias); - } - const value = processValue(key, val, shouldStripQuotes); - const splitKey = key.split('.'); - setKey(argv, splitKey, value); - // handle populating aliases of the full key - if (flags.aliases[key]) { - flags.aliases[key].forEach(function (x) { - const keyProperties = x.split('.'); - setKey(argv, keyProperties, value); - }); - } - // handle populating aliases of the first element of the dot-notation key - if (splitKey.length > 1 && configuration['dot-notation']) { - ; - (flags.aliases[splitKey[0]] || []).forEach(function (x) { - let keyProperties = x.split('.'); - // expand alias with nested objects in key - const a = [].concat(splitKey); - a.shift(); // nuke the old key. - keyProperties = keyProperties.concat(a); - // populate alias only if is not already an alias of the full key - // (already populated above) - if (!(flags.aliases[key] || []).includes(keyProperties.join('.'))) { - setKey(argv, keyProperties, value); - } - }); - } - // Set normalize getter and setter when key is in 'normalize' but isn't an array - if (checkAllAliases(key, flags.normalize) && !checkAllAliases(key, flags.arrays)) { - const keys = [key].concat(flags.aliases[key] || []); - keys.forEach(function (key) { - Object.defineProperty(argvReturn, key, { - enumerable: true, - get() { - return val; - }, - set(value) { - val = typeof value === 'string' ? mixin.normalize(value) : value; - } - }); - }); - } - } - function addNewAlias(key, alias) { - if (!(flags.aliases[key] && flags.aliases[key].length)) { - flags.aliases[key] = [alias]; - newAliases[alias] = true; - } - if (!(flags.aliases[alias] && flags.aliases[alias].length)) { - addNewAlias(alias, key); - } - } - function processValue(key, val, shouldStripQuotes) { - // strings may be quoted, clean this up as we assign values. - if (shouldStripQuotes) { - val = stripQuotes(val); - } - // handle parsing boolean arguments --foo=true --bar false. - if (checkAllAliases(key, flags.bools) || checkAllAliases(key, flags.counts)) { - if (typeof val === 'string') - val = val === 'true'; - } - let value = Array.isArray(val) - ? 
val.map(function (v) { return maybeCoerceNumber(key, v); }) - : maybeCoerceNumber(key, val); - // increment a count given as arg (either no value or value parsed as boolean) - if (checkAllAliases(key, flags.counts) && (isUndefined(value) || typeof value === 'boolean')) { - value = increment(); - } - // Set normalized value when key is in 'normalize' and in 'arrays' - if (checkAllAliases(key, flags.normalize) && checkAllAliases(key, flags.arrays)) { - if (Array.isArray(val)) - value = val.map((val) => { return mixin.normalize(val); }); - else - value = mixin.normalize(val); - } - return value; - } - function maybeCoerceNumber(key, value) { - if (!configuration['parse-positional-numbers'] && key === '_') - return value; - if (!checkAllAliases(key, flags.strings) && !checkAllAliases(key, flags.bools) && !Array.isArray(value)) { - const shouldCoerceNumber = looksLikeNumber(value) && configuration['parse-numbers'] && (Number.isSafeInteger(Math.floor(parseFloat(`${value}`)))); - if (shouldCoerceNumber || (!isUndefined(value) && checkAllAliases(key, flags.numbers))) { - value = Number(value); - } - } - return value; - } - // set args from config.json file, this should be - // applied last so that defaults can be applied. - function setConfig(argv) { - const configLookup = Object.create(null); - // expand defaults/aliases, in-case any happen to reference - // the config.json file. - applyDefaultsAndAliases(configLookup, flags.aliases, defaults); - Object.keys(flags.configs).forEach(function (configKey) { - const configPath = argv[configKey] || configLookup[configKey]; - if (configPath) { - try { - let config = null; - const resolvedConfigPath = mixin.resolve(mixin.cwd(), configPath); - const resolveConfig = flags.configs[configKey]; - if (typeof resolveConfig === 'function') { - try { - config = resolveConfig(resolvedConfigPath); - } - catch (e) { - config = e; - } - if (config instanceof Error) { - error = config; - return; - } - } - else { - config = mixin.require(resolvedConfigPath); - } - setConfigObject(config); - } - catch (ex) { - // Deno will receive a PermissionDenied error if an attempt is - // made to load config without the --allow-read flag: - if (ex.name === 'PermissionDenied') - error = ex; - else if (argv[configKey]) - error = Error(__('Invalid JSON config file: %s', configPath)); - } - } - }); - } - // set args from config object. - // it recursively checks nested objects. - function setConfigObject(config, prev) { - Object.keys(config).forEach(function (key) { - const value = config[key]; - const fullKey = prev ? prev + '.' + key : key; - // if the value is an inner object and we have dot-notation - // enabled, treat inner objects in config the same as - // heavily nested dot notations (foo.bar.apple). - if (typeof value === 'object' && value !== null && !Array.isArray(value) && configuration['dot-notation']) { - // if the value is an object but not an array, check nested object - setConfigObject(value, fullKey); - } - else { - // setting arguments via CLI takes precedence over - // values within the config file. 
- if (!hasKey(argv, fullKey.split('.')) || (checkAllAliases(fullKey, flags.arrays) && configuration['combine-arrays'])) { - setArg(fullKey, value); - } - } - }); - } - // set all config objects passed in opts - function setConfigObjects() { - if (typeof configObjects !== 'undefined') { - configObjects.forEach(function (configObject) { - setConfigObject(configObject); - }); - } - } - function applyEnvVars(argv, configOnly) { - if (typeof envPrefix === 'undefined') - return; - const prefix = typeof envPrefix === 'string' ? envPrefix : ''; - const env = mixin.env(); - Object.keys(env).forEach(function (envVar) { - if (prefix === '' || envVar.lastIndexOf(prefix, 0) === 0) { - // get array of nested keys and convert them to camel case - const keys = envVar.split('__').map(function (key, i) { - if (i === 0) { - key = key.substring(prefix.length); - } - return camelCase(key); - }); - if (((configOnly && flags.configs[keys.join('.')]) || !configOnly) && !hasKey(argv, keys)) { - setArg(keys.join('.'), env[envVar]); - } - } - }); - } - function applyCoercions(argv) { - let coerce; - const applied = new Set(); - Object.keys(argv).forEach(function (key) { - if (!applied.has(key)) { // If we haven't already coerced this option via one of its aliases - coerce = checkAllAliases(key, flags.coercions); - if (typeof coerce === 'function') { - try { - const value = maybeCoerceNumber(key, coerce(argv[key])); - ([].concat(flags.aliases[key] || [], key)).forEach(ali => { - applied.add(ali); - argv[ali] = value; - }); - } - catch (err) { - error = err; - } - } - } - }); - } - function setPlaceholderKeys(argv) { - flags.keys.forEach((key) => { - // don't set placeholder keys for dot notation options 'foo.bar'. - if (~key.indexOf('.')) - return; - if (typeof argv[key] === 'undefined') - argv[key] = undefined; - }); - return argv; - } - function applyDefaultsAndAliases(obj, aliases, defaults, canLog = false) { - Object.keys(defaults).forEach(function (key) { - if (!hasKey(obj, key.split('.'))) { - setKey(obj, key.split('.'), defaults[key]); - if (canLog) - defaulted[key] = true; - (aliases[key] || []).forEach(function (x) { - if (hasKey(obj, x.split('.'))) - return; - setKey(obj, x.split('.'), defaults[key]); - }); - } - }); - } - function hasKey(obj, keys) { - let o = obj; - if (!configuration['dot-notation']) - keys = [keys.join('.')]; - keys.slice(0, -1).forEach(function (key) { - o = (o[key] || {}); - }); - const key = keys[keys.length - 1]; - if (typeof o !== 'object') - return false; - else - return key in o; - } - function setKey(obj, keys, value) { - let o = obj; - if (!configuration['dot-notation']) - keys = [keys.join('.')]; - keys.slice(0, -1).forEach(function (key) { - // TODO(bcoe): in the next major version of yargs, switch to - // Object.create(null) for dot notation: - key = sanitizeKey(key); - if (typeof o === 'object' && o[key] === undefined) { - o[key] = {}; - } - if (typeof o[key] !== 'object' || Array.isArray(o[key])) { - // ensure that o[key] is an array, and that the last item is an empty object. 
- if (Array.isArray(o[key])) { - o[key].push({}); - } - else { - o[key] = [o[key], {}]; - } - // we want to update the empty object at the end of the o[key] array, so set o to that object - o = o[key][o[key].length - 1]; - } - else { - o = o[key]; - } - }); - // TODO(bcoe): in the next major version of yargs, switch to - // Object.create(null) for dot notation: - const key = sanitizeKey(keys[keys.length - 1]); - const isTypeArray = checkAllAliases(keys.join('.'), flags.arrays); - const isValueArray = Array.isArray(value); - let duplicate = configuration['duplicate-arguments-array']; - // nargs has higher priority than duplicate - if (!duplicate && checkAllAliases(key, flags.nargs)) { - duplicate = true; - if ((!isUndefined(o[key]) && flags.nargs[key] === 1) || (Array.isArray(o[key]) && o[key].length === flags.nargs[key])) { - o[key] = undefined; - } - } - if (value === increment()) { - o[key] = increment(o[key]); - } - else if (Array.isArray(o[key])) { - if (duplicate && isTypeArray && isValueArray) { - o[key] = configuration['flatten-duplicate-arrays'] ? o[key].concat(value) : (Array.isArray(o[key][0]) ? o[key] : [o[key]]).concat([value]); - } - else if (!duplicate && Boolean(isTypeArray) === Boolean(isValueArray)) { - o[key] = value; - } - else { - o[key] = o[key].concat([value]); - } - } - else if (o[key] === undefined && isTypeArray) { - o[key] = isValueArray ? value : [value]; - } - else if (duplicate && !(o[key] === undefined || - checkAllAliases(key, flags.counts) || - checkAllAliases(key, flags.bools))) { - o[key] = [o[key], value]; - } - else { - o[key] = value; - } - } - // extend the aliases list with inferred aliases. - function extendAliases(...args) { - args.forEach(function (obj) { - Object.keys(obj || {}).forEach(function (key) { - // short-circuit if we've already added a key - // to the aliases array, for example it might - // exist in both 'opts.default' and 'opts.key'. - if (flags.aliases[key]) - return; - flags.aliases[key] = [].concat(aliases[key] || []); - // For "--option-name", also set argv.optionName - flags.aliases[key].concat(key).forEach(function (x) { - if (/-/.test(x) && configuration['camel-case-expansion']) { - const c = camelCase(x); - if (c !== key && flags.aliases[key].indexOf(c) === -1) { - flags.aliases[key].push(c); - newAliases[c] = true; - } - } - }); - // For "--optionName", also set argv['option-name'] - flags.aliases[key].concat(key).forEach(function (x) { - if (x.length > 1 && /[A-Z]/.test(x) && configuration['camel-case-expansion']) { - const c = decamelize(x, '-'); - if (c !== key && flags.aliases[key].indexOf(c) === -1) { - flags.aliases[key].push(c); - newAliases[c] = true; - } - } - }); - flags.aliases[key].forEach(function (x) { - flags.aliases[x] = [key].concat(flags.aliases[key].filter(function (y) { - return x !== y; - })); - }); - }); - }); - } - function checkAllAliases(key, flag) { - const toCheck = [].concat(flags.aliases[key] || [], key); - const keys = Object.keys(flag); - const setAlias = toCheck.find(key => keys.includes(key)); - return setAlias ? flag[setAlias] : false; - } - function hasAnyFlag(key) { - const flagsKeys = Object.keys(flags); - const toCheck = [].concat(flagsKeys.map(k => flags[k])); - return toCheck.some(function (flag) { - return Array.isArray(flag) ? 
flag.includes(key) : flag[key]; - }); - } - function hasFlagsMatching(arg, ...patterns) { - const toCheck = [].concat(...patterns); - return toCheck.some(function (pattern) { - const match = arg.match(pattern); - return match && hasAnyFlag(match[1]); - }); - } - // based on a simplified version of the short flag group parsing logic - function hasAllShortFlags(arg) { - // if this is a negative number, or doesn't start with a single hyphen, it's not a short flag group - if (arg.match(negative) || !arg.match(/^-[^-]+/)) { - return false; - } - let hasAllFlags = true; - let next; - const letters = arg.slice(1).split(''); - for (let j = 0; j < letters.length; j++) { - next = arg.slice(j + 2); - if (!hasAnyFlag(letters[j])) { - hasAllFlags = false; - break; - } - if ((letters[j + 1] && letters[j + 1] === '=') || - next === '-' || - (/[A-Za-z]/.test(letters[j]) && /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) || - (letters[j + 1] && letters[j + 1].match(/\W/))) { - break; - } - } - return hasAllFlags; - } - function isUnknownOptionAsArg(arg) { - return configuration['unknown-options-as-args'] && isUnknownOption(arg); - } - function isUnknownOption(arg) { - arg = arg.replace(/^-{3,}/, '--'); - // ignore negative numbers - if (arg.match(negative)) { - return false; - } - // if this is a short option group and all of them are configured, it isn't unknown - if (hasAllShortFlags(arg)) { - return false; - } - // e.g. '--count=2' - const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/; - // e.g. '-a' or '--arg' - const normalFlag = /^-+([^=]+?)$/; - // e.g. '-a-' - const flagEndingInHyphen = /^-+([^=]+?)-$/; - // e.g. '-abc123' - const flagEndingInDigits = /^-+([^=]+?\d+)$/; - // e.g. '-a/usr/local' - const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/; - // check the different types of flag styles, including negatedBoolean, a pattern defined near the start of the parse method - return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters); - } - // make a best effort to pick a default value - // for an option based on name and type. - function defaultValue(key) { - if (!checkAllAliases(key, flags.bools) && - !checkAllAliases(key, flags.counts) && - `${key}` in defaults) { - return defaults[key]; - } - else { - return defaultForType(guessType(key)); - } - } - // return a default value, given the type of a flag., - function defaultForType(type) { - const def = { - [DefaultValuesForTypeKey.BOOLEAN]: true, - [DefaultValuesForTypeKey.STRING]: '', - [DefaultValuesForTypeKey.NUMBER]: undefined, - [DefaultValuesForTypeKey.ARRAY]: [] - }; - return def[type]; - } - // given a flag, enforce a default type. 
- function guessType(key) { - let type = DefaultValuesForTypeKey.BOOLEAN; - if (checkAllAliases(key, flags.strings)) - type = DefaultValuesForTypeKey.STRING; - else if (checkAllAliases(key, flags.numbers)) - type = DefaultValuesForTypeKey.NUMBER; - else if (checkAllAliases(key, flags.bools)) - type = DefaultValuesForTypeKey.BOOLEAN; - else if (checkAllAliases(key, flags.arrays)) - type = DefaultValuesForTypeKey.ARRAY; - return type; - } - function isUndefined(num) { - return num === undefined; - } - // check user configuration settings for inconsistencies - function checkConfiguration() { - // count keys should not be set as array/narg - Object.keys(flags.counts).find(key => { - if (checkAllAliases(key, flags.arrays)) { - error = Error(__('Invalid configuration: %s, opts.count excludes opts.array.', key)); - return true; - } - else if (checkAllAliases(key, flags.nargs)) { - error = Error(__('Invalid configuration: %s, opts.count excludes opts.narg.', key)); - return true; - } - return false; - }); - } - return { - aliases: Object.assign({}, flags.aliases), - argv: Object.assign(argvReturn, argv), - configuration: configuration, - defaulted: Object.assign({}, defaulted), - error: error, - newAliases: Object.assign({}, newAliases) - }; - } -} -// if any aliases reference each other, we should -// merge them together. -function combineAliases(aliases) { - const aliasArrays = []; - const combined = Object.create(null); - let change = true; - // turn alias lookup hash {key: ['alias1', 'alias2']} into - // a simple array ['key', 'alias1', 'alias2'] - Object.keys(aliases).forEach(function (key) { - aliasArrays.push([].concat(aliases[key], key)); - }); - // combine arrays until zero changes are - // made in an iteration. - while (change) { - change = false; - for (let i = 0; i < aliasArrays.length; i++) { - for (let ii = i + 1; ii < aliasArrays.length; ii++) { - const intersect = aliasArrays[i].filter(function (v) { - return aliasArrays[ii].indexOf(v) !== -1; - }); - if (intersect.length) { - aliasArrays[i] = aliasArrays[i].concat(aliasArrays[ii]); - aliasArrays.splice(ii, 1); - change = true; - break; - } - } - } - } - // map arrays back to the hash-lookup (de-dupe while - // we're at it). - aliasArrays.forEach(function (aliasArray) { - aliasArray = aliasArray.filter(function (v, i, self) { - return self.indexOf(v) === i; - }); - const lastAlias = aliasArray.pop(); - if (lastAlias !== undefined && typeof lastAlias === 'string') { - combined[lastAlias] = aliasArray; - } - }); - return combined; -} -// this function should only be called when a count is given as an arg -// it is NOT called to set a default value -// thus we can start the count at 1 instead of 0 -function increment(orig) { - return orig !== undefined ? orig + 1 : 1; -} -// TODO(bcoe): in the next major version of yargs, switch to -// Object.create(null) for dot notation: -function sanitizeKey(key) { - if (key === '__proto__') - return '___proto___'; - return key; -} -function stripQuotes(val) { - return (typeof val === 'string' && - (val[0] === "'" || val[0] === '"') && - val[val.length - 1] === val[0]) - ? 
val.substring(1, val.length - 1) - : val; -} diff --git a/node_modules/yargs-parser/package.json b/node_modules/yargs-parser/package.json deleted file mode 100644 index decd0c3..0000000 --- a/node_modules/yargs-parser/package.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "name": "yargs-parser", - "version": "21.1.1", - "description": "the mighty option parser used by yargs", - "main": "build/index.cjs", - "exports": { - ".": [ - { - "import": "./build/lib/index.js", - "require": "./build/index.cjs" - }, - "./build/index.cjs" - ], - "./browser": [ - "./browser.js" - ] - }, - "type": "module", - "module": "./build/lib/index.js", - "scripts": { - "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'", - "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'", - "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs", - "test": "c8 --reporter=text --reporter=html mocha test/*.cjs", - "test:esm": "c8 --reporter=text --reporter=html mocha test/*.mjs", - "test:browser": "start-server-and-test 'serve ./ -p 8080' http://127.0.0.1:8080/package.json 'node ./test/browser/yargs-test.cjs'", - "pretest:typescript": "npm run pretest", - "test:typescript": "c8 mocha ./build/test/typescript/*.js", - "coverage": "c8 report --check-coverage", - "precompile": "rimraf build", - "compile": "tsc", - "postcompile": "npm run build:cjs", - "build:cjs": "rollup -c", - "prepare": "npm run compile" - }, - "repository": { - "type": "git", - "url": "https://github.com/yargs/yargs-parser.git" - }, - "keywords": [ - "argument", - "parser", - "yargs", - "command", - "cli", - "parsing", - "option", - "args", - "argument" - ], - "author": "Ben Coe ", - "license": "ISC", - "devDependencies": { - "@types/chai": "^4.2.11", - "@types/mocha": "^9.0.0", - "@types/node": "^16.11.4", - "@typescript-eslint/eslint-plugin": "^3.10.1", - "@typescript-eslint/parser": "^3.10.1", - "c8": "^7.3.0", - "chai": "^4.2.0", - "cross-env": "^7.0.2", - "eslint": "^7.0.0", - "eslint-plugin-import": "^2.20.1", - "eslint-plugin-node": "^11.0.0", - "gts": "^3.0.0", - "mocha": "^10.0.0", - "puppeteer": "^16.0.0", - "rimraf": "^3.0.2", - "rollup": "^2.22.1", - "rollup-plugin-cleanup": "^3.1.1", - "rollup-plugin-ts": "^3.0.2", - "serve": "^14.0.0", - "standardx": "^7.0.0", - "start-server-and-test": "^1.11.2", - "ts-transform-default-export": "^1.0.2", - "typescript": "^4.0.0" - }, - "files": [ - "browser.js", - "build", - "!*.d.ts", - "!*.d.cts" - ], - "engines": { - "node": ">=12" - }, - "standardx": { - "ignore": [ - "build" - ] - } -} diff --git a/node_modules/yargs/LICENSE b/node_modules/yargs/LICENSE deleted file mode 100644 index b0145ca..0000000 --- a/node_modules/yargs/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright 2010 James Halliday (mail@substack.net); Modified work Copyright 2014 Contributors (ben@npmjs.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/yargs/README.md b/node_modules/yargs/README.md deleted file mode 100644 index 51f5b22..0000000 --- a/node_modules/yargs/README.md +++ /dev/null @@ -1,204 +0,0 @@ -

- Yargs
- Yargs be a node.js library fer hearties tryin' ter parse optstrings
    - -![ci](https://github.com/yargs/yargs/workflows/ci/badge.svg) -[![NPM version][npm-image]][npm-url] -[![js-standard-style][standard-image]][standard-url] -[![Coverage][coverage-image]][coverage-url] -[![Conventional Commits][conventional-commits-image]][conventional-commits-url] -[![Slack][slack-image]][slack-url] - -## Description -Yargs helps you build interactive command line tools, by parsing arguments and generating an elegant user interface. - -It gives you: - -* commands and (grouped) options (`my-program.js serve --port=5000`). -* a dynamically generated help menu based on your arguments: - -``` -mocha [spec..] - -Run tests with Mocha - -Commands - mocha inspect [spec..] Run tests with Mocha [default] - mocha init create a client-side Mocha setup at - -Rules & Behavior - --allow-uncaught Allow uncaught errors to propagate [boolean] - --async-only, -A Require all tests to use a callback (async) or - return a Promise [boolean] -``` - -* bash-completion shortcuts for commands and options. -* and [tons more](/docs/api.md). - -## Installation - -Stable version: -```bash -npm i yargs -``` - -Bleeding edge version with the most recent features: -```bash -npm i yargs@next -``` - -## Usage - -### Simple Example - -```javascript -#!/usr/bin/env node -const yargs = require('yargs/yargs') -const { hideBin } = require('yargs/helpers') -const argv = yargs(hideBin(process.argv)).argv - -if (argv.ships > 3 && argv.distance < 53.5) { - console.log('Plunder more riffiwobbles!') -} else { - console.log('Retreat from the xupptumblers!') -} -``` - -```bash -$ ./plunder.js --ships=4 --distance=22 -Plunder more riffiwobbles! - -$ ./plunder.js --ships 12 --distance 98.7 -Retreat from the xupptumblers! -``` - -> Note: `hideBin` is a shorthand for [`process.argv.slice(2)`](https://nodejs.org/en/knowledge/command-line/how-to-parse-command-line-arguments/). It has the benefit that it takes into account variations in some environments, e.g., [Electron](https://github.com/electron/electron/issues/4690). - -### Complex Example - -```javascript -#!/usr/bin/env node -const yargs = require('yargs/yargs') -const { hideBin } = require('yargs/helpers') - -yargs(hideBin(process.argv)) - .command('serve [port]', 'start the server', (yargs) => { - return yargs - .positional('port', { - describe: 'port to bind on', - default: 5000 - }) - }, (argv) => { - if (argv.verbose) console.info(`start server on :${argv.port}`) - serve(argv.port) - }) - .option('verbose', { - alias: 'v', - type: 'boolean', - description: 'Run with verbose logging' - }) - .parse() -``` - -Run the example above with `--help` to see the help for the application. - -## Supported Platforms - -### TypeScript - -yargs has type definitions at [@types/yargs][type-definitions]. - -``` -npm i @types/yargs --save-dev -``` - -See usage examples in [docs](/docs/typescript.md). 
- -### Deno - -As of `v16`, `yargs` supports [Deno](https://github.com/denoland/deno): - -```typescript -import yargs from 'https://deno.land/x/yargs/deno.ts' -import { Arguments } from 'https://deno.land/x/yargs/deno-types.ts' - -yargs(Deno.args) - .command('download ', 'download a list of files', (yargs: any) => { - return yargs.positional('files', { - describe: 'a list of files to do something with' - }) - }, (argv: Arguments) => { - console.info(argv) - }) - .strictCommands() - .demandCommand(1) - .parse() -``` - -### ESM - -As of `v16`,`yargs` supports ESM imports: - -```js -import yargs from 'yargs' -import { hideBin } from 'yargs/helpers' - -yargs(hideBin(process.argv)) - .command('curl ', 'fetch the contents of the URL', () => {}, (argv) => { - console.info(argv) - }) - .demandCommand(1) - .parse() -``` - -### Usage in Browser - -See examples of using yargs in the browser in [docs](/docs/browser.md). - -## Community - -Having problems? want to contribute? join our [community slack](http://devtoolscommunity.herokuapp.com). - -## Documentation - -### Table of Contents - -* [Yargs' API](/docs/api.md) -* [Examples](/docs/examples.md) -* [Parsing Tricks](/docs/tricks.md) - * [Stop the Parser](/docs/tricks.md#stop) - * [Negating Boolean Arguments](/docs/tricks.md#negate) - * [Numbers](/docs/tricks.md#numbers) - * [Arrays](/docs/tricks.md#arrays) - * [Objects](/docs/tricks.md#objects) - * [Quotes](/docs/tricks.md#quotes) -* [Advanced Topics](/docs/advanced.md) - * [Composing Your App Using Commands](/docs/advanced.md#commands) - * [Building Configurable CLI Apps](/docs/advanced.md#configuration) - * [Customizing Yargs' Parser](/docs/advanced.md#customizing) - * [Bundling yargs](/docs/bundling.md) -* [Contributing](/contributing.md) - -## Supported Node.js Versions - -Libraries in this ecosystem make a best effort to track -[Node.js' release schedule](https://nodejs.org/en/about/releases/). Here's [a -post on why we think this is important](https://medium.com/the-node-js-collection/maintainers-should-consider-following-node-js-release-schedule-ab08ed4de71a). 
- -[npm-url]: https://www.npmjs.com/package/yargs -[npm-image]: https://img.shields.io/npm/v/yargs.svg -[standard-image]: https://img.shields.io/badge/code%20style-standard-brightgreen.svg -[standard-url]: http://standardjs.com/ -[conventional-commits-image]: https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg -[conventional-commits-url]: https://conventionalcommits.org/ -[slack-image]: http://devtoolscommunity.herokuapp.com/badge.svg -[slack-url]: http://devtoolscommunity.herokuapp.com -[type-definitions]: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/yargs -[coverage-image]: https://img.shields.io/nycrc/yargs/yargs -[coverage-url]: https://github.com/yargs/yargs/blob/main/.nycrc diff --git a/node_modules/yargs/browser.d.ts b/node_modules/yargs/browser.d.ts deleted file mode 100644 index 21f3fc6..0000000 --- a/node_modules/yargs/browser.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import {YargsFactory} from './build/lib/yargs-factory'; - -declare const Yargs: ReturnType; - -export default Yargs; diff --git a/node_modules/yargs/browser.mjs b/node_modules/yargs/browser.mjs deleted file mode 100644 index 2d0d6e9..0000000 --- a/node_modules/yargs/browser.mjs +++ /dev/null @@ -1,7 +0,0 @@ -// Bootstrap yargs for browser: -import browserPlatformShim from './lib/platform-shims/browser.mjs'; -import {YargsFactory} from './build/lib/yargs-factory.js'; - -const Yargs = YargsFactory(browserPlatformShim); - -export default Yargs; diff --git a/node_modules/yargs/build/index.cjs b/node_modules/yargs/build/index.cjs deleted file mode 100644 index e9cf013..0000000 --- a/node_modules/yargs/build/index.cjs +++ /dev/null @@ -1 +0,0 @@ -"use strict";var t=require("assert");class e extends Error{constructor(t){super(t||"yargs error"),this.name="YError",Error.captureStackTrace&&Error.captureStackTrace(this,e)}}let s,i=[];function n(t,o,a,h){s=h;let l={};if(Object.prototype.hasOwnProperty.call(t,"extends")){if("string"!=typeof t.extends)return l;const r=/\.json|\..*rc$/.test(t.extends);let h=null;if(r)h=function(t,e){return s.path.resolve(t,e)}(o,t.extends);else try{h=require.resolve(t.extends)}catch(e){return t}!function(t){if(i.indexOf(t)>-1)throw new e(`Circular extended configurations: '${t}'.`)}(h),i.push(h),l=r?JSON.parse(s.readFileSync(h,"utf8")):require(t.extends),delete t.extends,l=n(l,s.path.dirname(h),a,s)}return i=[],a?r(l,t):Object.assign({},l,t)}function r(t,e){const s={};function i(t){return t&&"object"==typeof t&&!Array.isArray(t)}Object.assign(s,t);for(const n of Object.keys(e))i(e[n])&&i(s[n])?s[n]=r(t[n],e[n]):s[n]=e[n];return s}function o(t){const e=t.replace(/\s{2,}/g," ").split(/\s+(?![^[]*]|[^<]*>)/),s=/\.*[\][<>]/g,i=e.shift();if(!i)throw new Error(`No command found in: ${t}`);const n={cmd:i.replace(s,""),demanded:[],optional:[]};return e.forEach(((t,i)=>{let r=!1;t=t.replace(/\s/g,""),/\.+[\]>]/.test(t)&&i===e.length-1&&(r=!0),/^\[/.test(t)?n.optional.push({cmd:t.replace(s,"").split("|"),variadic:r}):n.demanded.push({cmd:t.replace(s,"").split("|"),variadic:r})})),n}const a=["first","second","third","fourth","fifth","sixth"];function h(t,s,i){try{let n=0;const[r,a,h]="object"==typeof t?[{demanded:[],optional:[]},t,s]:[o(`cmd ${t}`),s,i],f=[].slice.call(a);for(;f.length&&void 0===f[f.length-1];)f.pop();const d=h||f.length;if(du)throw new e(`Too many arguments provided. 
Expected max ${u} but received ${d}.`);r.demanded.forEach((t=>{const e=l(f.shift());0===t.cmd.filter((t=>t===e||"*"===t)).length&&c(e,t.cmd,n),n+=1})),r.optional.forEach((t=>{if(0===f.length)return;const e=l(f.shift());0===t.cmd.filter((t=>t===e||"*"===t)).length&&c(e,t.cmd,n),n+=1}))}catch(t){console.warn(t.stack)}}function l(t){return Array.isArray(t)?"array":null===t?"null":typeof t}function c(t,s,i){throw new e(`Invalid ${a[i]||"manyith"} argument. Expected ${s.join(" or ")} but received ${t}.`)}function f(t){return!!t&&!!t.then&&"function"==typeof t.then}function d(t,e,s,i){s.assert.notStrictEqual(t,e,i)}function u(t,e){e.assert.strictEqual(typeof t,"string")}function p(t){return Object.keys(t)}function g(t={},e=(()=>!0)){const s={};return p(t).forEach((i=>{e(i,t[i])&&(s[i]=t[i])})),s}function m(){return process.versions.electron&&!process.defaultApp?0:1}function y(){return process.argv[m()]}var b=Object.freeze({__proto__:null,hideBin:function(t){return t.slice(m()+1)},getProcessArgvBin:y});function v(t,e,s,i){if("a"===s&&!i)throw new TypeError("Private accessor was defined without a getter");if("function"==typeof e?t!==e||!i:!e.has(t))throw new TypeError("Cannot read private member from an object whose class did not declare it");return"m"===s?i:"a"===s?i.call(t):i?i.value:e.get(t)}function O(t,e,s,i,n){if("m"===i)throw new TypeError("Private method is not writable");if("a"===i&&!n)throw new TypeError("Private accessor was defined without a setter");if("function"==typeof e?t!==e||!n:!e.has(t))throw new TypeError("Cannot write private member to an object whose class did not declare it");return"a"===i?n.call(t,s):n?n.value=s:e.set(t,s),s}class w{constructor(t){this.globalMiddleware=[],this.frozens=[],this.yargs=t}addMiddleware(t,e,s=!0,i=!1){if(h(" [boolean] [boolean] [boolean]",[t,e,s],arguments.length),Array.isArray(t)){for(let i=0;i{const i=[...s[e]||[],e];return!t.option||!i.includes(t.option)})),t.option=e,this.addMiddleware(t,!0,!0,!0)}getMiddleware(){return this.globalMiddleware}freeze(){this.frozens.push([...this.globalMiddleware])}unfreeze(){const t=this.frozens.pop();void 0!==t&&(this.globalMiddleware=t)}reset(){this.globalMiddleware=this.globalMiddleware.filter((t=>t.global))}}function C(t,e,s,i){return s.reduce(((t,s)=>{if(s.applyBeforeValidation!==i)return t;if(s.mutates){if(s.applied)return t;s.applied=!0}if(f(t))return t.then((t=>Promise.all([t,s(t,e)]))).then((([t,e])=>Object.assign(t,e)));{const i=s(t,e);return f(i)?i.then((e=>Object.assign(t,e))):Object.assign(t,i)}}),t)}function j(t,e,s=(t=>{throw t})){try{const s="function"==typeof t?t():t;return f(s)?s.then((t=>e(t))):e(s)}catch(t){return s(t)}}const M=/(^\*)|(^\$0)/;class _{constructor(t,e,s,i){this.requireCache=new Set,this.handlers={},this.aliasMap={},this.frozens=[],this.shim=i,this.usage=t,this.globalMiddleware=s,this.validation=e}addDirectory(t,e,s,i){"boolean"!=typeof(i=i||{}).recurse&&(i.recurse=!1),Array.isArray(i.extensions)||(i.extensions=["js"]);const n="function"==typeof i.visit?i.visit:t=>t;i.visit=(t,e,s)=>{const i=n(t,e,s);if(i){if(this.requireCache.has(e))return i;this.requireCache.add(e),this.addHandler(i)}return i},this.shim.requireDirectory({require:e,filename:s},t,i)}addHandler(t,e,s,i,n,r){let a=[];const h=function(t){return t?t.map((t=>(t.applyBeforeValidation=!1,t))):[]}(n);if(i=i||(()=>{}),Array.isArray(t))if(function(t){return t.every((t=>"string"==typeof t))}(t))[t,...a]=t;else for(const e of t)this.addHandler(e);else{if(function(t){return"object"==typeof t&&!Array.isArray(t)}(t)){let 
e=Array.isArray(t.command)||"string"==typeof t.command?t.command:this.moduleName(t);return t.aliases&&(e=[].concat(e).concat(t.aliases)),void this.addHandler(e,this.extractDesc(t),t.builder,t.handler,t.middlewares,t.deprecated)}if(k(s))return void this.addHandler([t].concat(a),e,s.builder,s.handler,s.middlewares,s.deprecated)}if("string"==typeof t){const n=o(t);a=a.map((t=>o(t).cmd));let l=!1;const c=[n.cmd].concat(a).filter((t=>!M.test(t)||(l=!0,!1)));0===c.length&&l&&c.push("$0"),l&&(n.cmd=c[0],a=c.slice(1),t=t.replace(M,n.cmd)),a.forEach((t=>{this.aliasMap[t]=n.cmd})),!1!==e&&this.usage.command(t,e,l,a,r),this.handlers[n.cmd]={original:t,description:e,handler:i,builder:s||{},middlewares:h,deprecated:r,demanded:n.demanded,optional:n.optional},l&&(this.defaultCommand=this.handlers[n.cmd])}}getCommandHandlers(){return this.handlers}getCommands(){return Object.keys(this.handlers).concat(Object.keys(this.aliasMap))}hasDefaultCommand(){return!!this.defaultCommand}runCommand(t,e,s,i,n,r){const o=this.handlers[t]||this.handlers[this.aliasMap[t]]||this.defaultCommand,a=e.getInternalMethods().getContext(),h=a.commands.slice(),l=!t;t&&(a.commands.push(t),a.fullCommands.push(o.original));const c=this.applyBuilderUpdateUsageAndParse(l,o,e,s.aliases,h,i,n,r);return f(c)?c.then((t=>this.applyMiddlewareAndGetResult(l,o,t.innerArgv,a,n,t.aliases,e))):this.applyMiddlewareAndGetResult(l,o,c.innerArgv,a,n,c.aliases,e)}applyBuilderUpdateUsageAndParse(t,e,s,i,n,r,o,a){const h=e.builder;let l=s;if(x(h)){s.getInternalMethods().getUsageInstance().freeze();const c=h(s.getInternalMethods().reset(i),a);if(f(c))return c.then((i=>{var a;return l=(a=i)&&"function"==typeof a.getInternalMethods?i:s,this.parseAndUpdateUsage(t,e,l,n,r,o)}))}else(function(t){return"object"==typeof t})(h)&&(s.getInternalMethods().getUsageInstance().freeze(),l=s.getInternalMethods().reset(i),Object.keys(e.builder).forEach((t=>{l.option(t,h[t])})));return this.parseAndUpdateUsage(t,e,l,n,r,o)}parseAndUpdateUsage(t,e,s,i,n,r){t&&s.getInternalMethods().getUsageInstance().unfreeze(!0),this.shouldUpdateUsage(s)&&s.getInternalMethods().getUsageInstance().usage(this.usageFromParentCommandsCommandHandler(i,e),e.description);const o=s.getInternalMethods().runYargsParserAndExecuteCommands(null,void 0,!0,n,r);return f(o)?o.then((t=>({aliases:s.parsed.aliases,innerArgv:t}))):{aliases:s.parsed.aliases,innerArgv:o}}shouldUpdateUsage(t){return!t.getInternalMethods().getUsageInstance().getUsageDisabled()&&0===t.getInternalMethods().getUsageInstance().getUsage().length}usageFromParentCommandsCommandHandler(t,e){const s=M.test(e.original)?e.original.replace(M,"").trim():e.original,i=t.filter((t=>!M.test(t)));return i.push(s),`$0 ${i.join(" ")}`}handleValidationAndGetResult(t,e,s,i,n,r,o,a){if(!r.getInternalMethods().getHasOutput()){const e=r.getInternalMethods().runValidation(n,a,r.parsed.error,t);s=j(s,(t=>(e(t),t)))}if(e.handler&&!r.getInternalMethods().getHasOutput()){r.getInternalMethods().setHasOutput();const i=!!r.getOptions().configuration["populate--"];r.getInternalMethods().postProcess(s,i,!1,!1),s=j(s=C(s,r,o,!1),(t=>{const s=e.handler(t);return f(s)?s.then((()=>t)):t})),t||r.getInternalMethods().getUsageInstance().cacheHelpMessage(),f(s)&&!r.getInternalMethods().hasParseCallback()&&s.catch((t=>{try{r.getInternalMethods().getUsageInstance().fail(null,t)}catch(t){}}))}return t||(i.commands.pop(),i.fullCommands.pop()),s}applyMiddlewareAndGetResult(t,e,s,i,n,r,o){let a={};if(n)return 
s;o.getInternalMethods().getHasOutput()||(a=this.populatePositionals(e,s,i,o));const h=this.globalMiddleware.getMiddleware().slice(0).concat(e.middlewares),l=C(s,o,h,!0);return f(l)?l.then((s=>this.handleValidationAndGetResult(t,e,s,i,r,o,h,a))):this.handleValidationAndGetResult(t,e,l,i,r,o,h,a)}populatePositionals(t,e,s,i){e._=e._.slice(s.commands.length);const n=t.demanded.slice(0),r=t.optional.slice(0),o={};for(this.validation.positionalCount(n.length,e._.length);n.length;){const t=n.shift();this.populatePositional(t,e,o)}for(;r.length;){const t=r.shift();this.populatePositional(t,e,o)}return e._=s.commands.concat(e._.map((t=>""+t))),this.postProcessPositionals(e,o,this.cmdToParseOptions(t.original),i),o}populatePositional(t,e,s){const i=t.cmd[0];t.variadic?s[i]=e._.splice(0).map(String):e._.length&&(s[i]=[String(e._.shift())])}cmdToParseOptions(t){const e={array:[],default:{},alias:{},demand:{}},s=o(t);return s.demanded.forEach((t=>{const[s,...i]=t.cmd;t.variadic&&(e.array.push(s),e.default[s]=[]),e.alias[s]=i,e.demand[s]=!0})),s.optional.forEach((t=>{const[s,...i]=t.cmd;t.variadic&&(e.array.push(s),e.default[s]=[]),e.alias[s]=i})),e}postProcessPositionals(t,e,s,i){const n=Object.assign({},i.getOptions());n.default=Object.assign(s.default,n.default);for(const t of Object.keys(s.alias))n.alias[t]=(n.alias[t]||[]).concat(s.alias[t]);n.array=n.array.concat(s.array),n.config={};const r=[];if(Object.keys(e).forEach((t=>{e[t].map((e=>{n.configuration["unknown-options-as-args"]&&(n.key[t]=!0),r.push(`--${t}`),r.push(e)}))})),!r.length)return;const o=Object.assign({},n.configuration,{"populate--":!1}),a=this.shim.Parser.detailed(r,Object.assign({},n,{configuration:o}));if(a.error)i.getInternalMethods().getUsageInstance().fail(a.error.message,a.error);else{const s=Object.keys(e);Object.keys(e).forEach((t=>{s.push(...a.aliases[t])})),Object.keys(a.argv).forEach((n=>{s.includes(n)&&(e[n]||(e[n]=a.argv[n]),!this.isInConfigs(i,n)&&!this.isDefaulted(i,n)&&Object.prototype.hasOwnProperty.call(t,n)&&Object.prototype.hasOwnProperty.call(a.argv,n)&&(Array.isArray(t[n])||Array.isArray(a.argv[n]))?t[n]=[].concat(t[n],a.argv[n]):t[n]=a.argv[n])}))}}isDefaulted(t,e){const{default:s}=t.getOptions();return Object.prototype.hasOwnProperty.call(s,e)||Object.prototype.hasOwnProperty.call(s,this.shim.Parser.camelCase(e))}isInConfigs(t,e){const{configObjects:s}=t.getOptions();return s.some((t=>Object.prototype.hasOwnProperty.call(t,e)))||s.some((t=>Object.prototype.hasOwnProperty.call(t,this.shim.Parser.camelCase(e))))}runDefaultBuilderOn(t){if(!this.defaultCommand)return;if(this.shouldUpdateUsage(t)){const e=M.test(this.defaultCommand.original)?this.defaultCommand.original:this.defaultCommand.original.replace(/^[^[\]<>]*/,"$0 ");t.getInternalMethods().getUsageInstance().usage(e,this.defaultCommand.description)}const e=this.defaultCommand.builder;if(x(e))return e(t,!0);k(e)||Object.keys(e).forEach((s=>{t.option(s,e[s])}))}moduleName(t){const e=function(t){if("undefined"==typeof require)return null;for(let e,s=0,i=Object.keys(require.cache);s{const s=e;s._handle&&s.isTTY&&"function"==typeof s._handle.setBlocking&&s._handle.setBlocking(t)}))}function A(t){return"boolean"==typeof t}function P(t,s){const i=s.y18n.__,n={},r=[];n.failFn=function(t){r.push(t)};let o=null,a=null,h=!0;n.showHelpOnFail=function(e=!0,s){const[i,r]="string"==typeof e?[!0,e]:[e,s];return t.getInternalMethods().isGlobalContext()&&(a=r),o=r,h=i,n};let l=!1;n.fail=function(s,i){const 
c=t.getInternalMethods().getLoggerInstance();if(!r.length){if(t.getExitProcess()&&E(!0),!l){l=!0,h&&(t.showHelp("error"),c.error()),(s||i)&&c.error(s||i);const e=o||a;e&&((s||i)&&c.error(""),c.error(e))}if(i=i||new e(s),t.getExitProcess())return t.exit(1);if(t.getInternalMethods().hasParseCallback())return t.exit(1,i);throw i}for(let t=r.length-1;t>=0;--t){const e=r[t];if(A(e)){if(i)throw i;if(s)throw Error(s)}else e(s,i,n)}};let c=[],f=!1;n.usage=(t,e)=>null===t?(f=!0,c=[],n):(f=!1,c.push([t,e||""]),n),n.getUsage=()=>c,n.getUsageDisabled=()=>f,n.getPositionalGroupName=()=>i("Positionals:");let d=[];n.example=(t,e)=>{d.push([t,e||""])};let u=[];n.command=function(t,e,s,i,n=!1){s&&(u=u.map((t=>(t[2]=!1,t)))),u.push([t,e||"",s,i,n])},n.getCommands=()=>u;let p={};n.describe=function(t,e){Array.isArray(t)?t.forEach((t=>{n.describe(t,e)})):"object"==typeof t?Object.keys(t).forEach((e=>{n.describe(e,t[e])})):p[t]=e},n.getDescriptions=()=>p;let m=[];n.epilog=t=>{m.push(t)};let y,b=!1;n.wrap=t=>{b=!0,y=t},n.getWrap=()=>s.getEnv("YARGS_DISABLE_WRAP")?null:(b||(y=function(){const t=80;return s.process.stdColumns?Math.min(t,s.process.stdColumns):t}(),b=!0),y);const v="__yargsString__:";function O(t,e,i){let n=0;return Array.isArray(t)||(t=Object.values(t).map((t=>[t]))),t.forEach((t=>{n=Math.max(s.stringWidth(i?`${i} ${I(t[0])}`:I(t[0]))+$(t[0]),n)})),e&&(n=Math.min(n,parseInt((.5*e).toString(),10))),n}let w;function C(e){return t.getOptions().hiddenOptions.indexOf(e)<0||t.parsed.argv[t.getOptions().showHiddenOpt]}function j(t,e){let s=`[${i("default:")} `;if(void 0===t&&!e)return null;if(e)s+=e;else switch(typeof t){case"string":s+=`"${t}"`;break;case"object":s+=JSON.stringify(t);break;default:s+=t}return`${s}]`}n.deferY18nLookup=t=>v+t,n.help=function(){if(w)return w;!function(){const e=t.getDemandedOptions(),s=t.getOptions();(Object.keys(s.alias)||[]).forEach((i=>{s.alias[i].forEach((r=>{p[r]&&n.describe(i,p[r]),r in e&&t.demandOption(i,e[r]),s.boolean.includes(r)&&t.boolean(i),s.count.includes(r)&&t.count(i),s.string.includes(r)&&t.string(i),s.normalize.includes(r)&&t.normalize(i),s.array.includes(r)&&t.array(i),s.number.includes(r)&&t.number(i)}))}))}();const e=t.customScriptName?t.$0:s.path.basename(t.$0),r=t.getDemandedOptions(),o=t.getDemandedCommands(),a=t.getDeprecatedOptions(),h=t.getGroups(),l=t.getOptions();let g=[];g=g.concat(Object.keys(p)),g=g.concat(Object.keys(r)),g=g.concat(Object.keys(o)),g=g.concat(Object.keys(l.default)),g=g.filter(C),g=Object.keys(g.reduce(((t,e)=>("_"!==e&&(t[e]=!0),t)),{}));const y=n.getWrap(),b=s.cliui({width:y,wrap:!!y});if(!f)if(c.length)c.forEach((t=>{b.div({text:`${t[0].replace(/\$0/g,e)}`}),t[1]&&b.div({text:`${t[1]}`,padding:[1,0,0,0]})})),b.div();else if(u.length){let t=null;t=o._?`${e} <${i("command")}>\n`:`${e} [${i("command")}]\n`,b.div(`${t}`)}if(u.length>1||1===u.length&&!u[0][2]){b.div(i("Commands:"));const s=t.getInternalMethods().getContext(),n=s.commands.length?`${s.commands.join(" ")} `:"";!0===t.getInternalMethods().getParserConfiguration()["sort-commands"]&&(u=u.sort(((t,e)=>t[0].localeCompare(e[0]))));const r=e?`${e} `:"";u.forEach((t=>{const s=`${r}${n}${t[0].replace(/^\$0 ?/,"")}`;b.span({text:s,padding:[0,2,0,2],width:O(u,y,`${e}${n}`)+4},{text:t[1]});const o=[];t[2]&&o.push(`[${i("default")}]`),t[3]&&t[3].length&&o.push(`[${i("aliases:")} ${t[3].join(", ")}]`),t[4]&&("string"==typeof t[4]?o.push(`[${i("deprecated: %s",t[4])}]`):o.push(`[${i("deprecated")}]`)),o.length?b.div({text:o.join(" 
"),padding:[0,0,0,2],align:"right"}):b.div()})),b.div()}const M=(Object.keys(l.alias)||[]).concat(Object.keys(t.parsed.newAliases)||[]);g=g.filter((e=>!t.parsed.newAliases[e]&&M.every((t=>-1===(l.alias[t]||[]).indexOf(e)))));const _=i("Options:");h[_]||(h[_]=[]),function(t,e,s,i){let n=[],r=null;Object.keys(s).forEach((t=>{n=n.concat(s[t])})),t.forEach((t=>{r=[t].concat(e[t]),r.some((t=>-1!==n.indexOf(t)))||s[i].push(t)}))}(g,l.alias,h,_);const k=t=>/^--/.test(I(t)),x=Object.keys(h).filter((t=>h[t].length>0)).map((t=>({groupName:t,normalizedKeys:h[t].filter(C).map((t=>{if(M.includes(t))return t;for(let e,s=0;void 0!==(e=M[s]);s++)if((l.alias[e]||[]).includes(t))return e;return t}))}))).filter((({normalizedKeys:t})=>t.length>0)).map((({groupName:t,normalizedKeys:e})=>{const s=e.reduce(((e,s)=>(e[s]=[s].concat(l.alias[s]||[]).map((e=>t===n.getPositionalGroupName()?e:(/^[0-9]$/.test(e)?l.boolean.includes(s)?"-":"--":e.length>1?"--":"-")+e)).sort(((t,e)=>k(t)===k(e)?0:k(t)?1:-1)).join(", "),e)),{});return{groupName:t,normalizedKeys:e,switches:s}}));if(x.filter((({groupName:t})=>t!==n.getPositionalGroupName())).some((({normalizedKeys:t,switches:e})=>!t.every((t=>k(e[t])))))&&x.filter((({groupName:t})=>t!==n.getPositionalGroupName())).forEach((({normalizedKeys:t,switches:e})=>{t.forEach((t=>{var s,i;k(e[t])&&(e[t]=(s=e[t],i=4,S(s)?{text:s.text,indentation:s.indentation+i}:{text:s,indentation:i}))}))})),x.forEach((({groupName:e,normalizedKeys:s,switches:o})=>{b.div(e),s.forEach((e=>{const s=o[e];let h=p[e]||"",c=null;h.includes(v)&&(h=i(h.substring(16))),l.boolean.includes(e)&&(c=`[${i("boolean")}]`),l.count.includes(e)&&(c=`[${i("count")}]`),l.string.includes(e)&&(c=`[${i("string")}]`),l.normalize.includes(e)&&(c=`[${i("string")}]`),l.array.includes(e)&&(c=`[${i("array")}]`),l.number.includes(e)&&(c=`[${i("number")}]`);const f=[e in a?(d=a[e],"string"==typeof d?`[${i("deprecated: %s",d)}]`:`[${i("deprecated")}]`):null,c,e in r?`[${i("required")}]`:null,l.choices&&l.choices[e]?`[${i("choices:")} ${n.stringifiedValues(l.choices[e])}]`:null,j(l.default[e],l.defaultDescription[e])].filter(Boolean).join(" ");var d;b.span({text:I(s),padding:[0,2,0,2+$(s)],width:O(o,y)+4},h);const u=!0===t.getInternalMethods().getUsageConfiguration()["hide-types"];f&&!u?b.div({text:f,padding:[0,0,0,2],align:"right"}):b.div()})),b.div()})),d.length&&(b.div(i("Examples:")),d.forEach((t=>{t[0]=t[0].replace(/\$0/g,e)})),d.forEach((t=>{""===t[1]?b.div({text:t[0],padding:[0,2,0,2]}):b.div({text:t[0],padding:[0,2,0,2],width:O(d,y)+4},{text:t[1]})})),b.div()),m.length>0){const t=m.map((t=>t.replace(/\$0/g,e))).join("\n");b.div(`${t}\n`)}return b.toString().replace(/\s*$/,"")},n.cacheHelpMessage=function(){w=this.help()},n.clearCachedHelpMessage=function(){w=void 0},n.hasCachedHelpMessage=function(){return!!w},n.showHelp=e=>{const s=t.getInternalMethods().getLoggerInstance();e||(e="error");("function"==typeof e?e:s[e])(n.help())},n.functionDescription=t=>["(",t.name?s.Parser.decamelize(t.name,"-"):i("generated-value"),")"].join(""),n.stringifiedValues=function(t,e){let s="";const i=e||", ",n=[].concat(t);return t&&n.length?(n.forEach((t=>{s.length&&(s+=i),s+=JSON.stringify(t)})),s):s};let M=null;n.version=t=>{M=t},n.showVersion=e=>{const s=t.getInternalMethods().getLoggerInstance();e||(e="error");("function"==typeof e?e:s[e])(M)},n.reset=function(t){return o=null,l=!1,c=[],f=!1,m=[],d=[],u=[],p=g(p,(e=>!t[e])),n};const _=[];return 
n.freeze=function(){_.push({failMessage:o,failureOutput:l,usages:c,usageDisabled:f,epilogs:m,examples:d,commands:u,descriptions:p})},n.unfreeze=function(t=!1){const e=_.pop();e&&(t?(p={...e.descriptions,...p},u=[...e.commands,...u],c=[...e.usages,...c],d=[...e.examples,...d],m=[...e.epilogs,...m]):({failMessage:o,failureOutput:l,usages:c,usageDisabled:f,epilogs:m,examples:d,commands:u,descriptions:p}=e))},n}function S(t){return"object"==typeof t}function $(t){return S(t)?t.indentation:0}function I(t){return S(t)?t.text:t}class D{constructor(t,e,s,i){var n,r,o;this.yargs=t,this.usage=e,this.command=s,this.shim=i,this.completionKey="get-yargs-completions",this.aliases=null,this.customCompletionFunction=null,this.indexAfterLastReset=0,this.zshShell=null!==(o=(null===(n=this.shim.getEnv("SHELL"))||void 0===n?void 0:n.includes("zsh"))||(null===(r=this.shim.getEnv("ZSH_NAME"))||void 0===r?void 0:r.includes("zsh")))&&void 0!==o&&o}defaultCompletion(t,e,s,i){const n=this.command.getCommandHandlers();for(let e=0,s=t.length;e{const i=o(s[0]).cmd;if(-1===e.indexOf(i))if(this.zshShell){const e=s[1]||"";t.push(i.replace(/:/g,"\\:")+":"+e)}else t.push(i)}))}optionCompletions(t,e,s,i){if((i.match(/^-/)||""===i&&0===t.length)&&!this.previousArgHasChoices(e)){const s=this.yargs.getOptions(),n=this.yargs.getGroups()[this.usage.getPositionalGroupName()]||[];Object.keys(s.key).forEach((r=>{const o=!!s.configuration["boolean-negation"]&&s.boolean.includes(r);n.includes(r)||s.hiddenOptions.includes(r)||this.argsContainKey(e,r,o)||this.completeOptionKey(r,t,i,o&&!!s.default[r])}))}}choicesFromOptionsCompletions(t,e,s,i){if(this.previousArgHasChoices(e)){const s=this.getPreviousArgChoices(e);s&&s.length>0&&t.push(...s.map((t=>t.replace(/:/g,"\\:"))))}}choicesFromPositionalsCompletions(t,e,s,i){if(""===i&&t.length>0&&this.previousArgHasChoices(e))return;const n=this.yargs.getGroups()[this.usage.getPositionalGroupName()]||[],r=Math.max(this.indexAfterLastReset,this.yargs.getInternalMethods().getContext().commands.length+1),o=n[s._.length-r-1];if(!o)return;const a=this.yargs.getOptions().choices[o]||[];for(const e of a)e.startsWith(i)&&t.push(e.replace(/:/g,"\\:"))}getPreviousArgChoices(t){if(t.length<1)return;let e=t[t.length-1],s="";if(!e.startsWith("-")&&t.length>1&&(s=e,e=t[t.length-2]),!e.startsWith("-"))return;const i=e.replace(/^-+/,""),n=this.yargs.getOptions(),r=[i,...this.yargs.getAliases()[i]||[]];let o;for(const t of r)if(Object.prototype.hasOwnProperty.call(n.key,t)&&Array.isArray(n.choices[t])){o=n.choices[t];break}return o?o.filter((t=>!s||t.startsWith(s))):void 0}previousArgHasChoices(t){const e=this.getPreviousArgChoices(t);return void 0!==e&&e.length>0}argsContainKey(t,e,s){const i=e=>-1!==t.indexOf((/^[^0-9]$/.test(e)?"-":"--")+e);if(i(e))return!0;if(s&&i(`no-${e}`))return!0;if(this.aliases)for(const t of this.aliases[e])if(i(t))return!0;return!1}completeOptionKey(t,e,s,i){var n,r,o,a;let h=t;if(this.zshShell){const e=this.usage.getDescriptions(),s=null===(r=null===(n=null==this?void 0:this.aliases)||void 0===n?void 0:n[t])||void 0===r?void 0:r.find((t=>{const s=e[t];return"string"==typeof s&&s.length>0})),i=s?e[s]:void 0,l=null!==(a=null!==(o=e[t])&&void 0!==o?o:i)&&void 0!==a?a:"";h=`${t.replace(/:/g,"\\:")}:${l.replace("__yargsString__:","").replace(/(\r\n|\n|\r)/gm," ")}`}const l=!/^--/.test(s)&&(t=>/^[^0-9]$/.test(t))(t)?"-":"--";e.push(l+h),i&&e.push(l+"no-"+h)}customCompletion(t,e,s,i){if(d(this.customCompletionFunction,null,this.shim),this.customCompletionFunction.length<3){const 
t=this.customCompletionFunction(s,e);return f(t)?t.then((t=>{this.shim.process.nextTick((()=>{i(null,t)}))})).catch((t=>{this.shim.process.nextTick((()=>{i(t,void 0)}))})):i(null,t)}return function(t){return t.length>3}(this.customCompletionFunction)?this.customCompletionFunction(s,e,((n=i)=>this.defaultCompletion(t,e,s,n)),(t=>{i(null,t)})):this.customCompletionFunction(s,e,(t=>{i(null,t)}))}getCompletion(t,e){const s=t.length?t[t.length-1]:"",i=this.yargs.parse(t,!0),n=this.customCompletionFunction?i=>this.customCompletion(t,i,s,e):i=>this.defaultCompletion(t,i,s,e);return f(i)?i.then(n):n(i)}generateCompletionScript(t,e){let s=this.zshShell?'#compdef {{app_name}}\n###-begin-{{app_name}}-completions-###\n#\n# yargs command completion script\n#\n# Installation: {{app_path}} {{completion_command}} >> ~/.zshrc\n# or {{app_path}} {{completion_command}} >> ~/.zprofile on OSX.\n#\n_{{app_name}}_yargs_completions()\n{\n local reply\n local si=$IFS\n IFS=$\'\n\' reply=($(COMP_CWORD="$((CURRENT-1))" COMP_LINE="$BUFFER" COMP_POINT="$CURSOR" {{app_path}} --get-yargs-completions "${words[@]}"))\n IFS=$si\n _describe \'values\' reply\n}\ncompdef _{{app_name}}_yargs_completions {{app_name}}\n###-end-{{app_name}}-completions-###\n':'###-begin-{{app_name}}-completions-###\n#\n# yargs command completion script\n#\n# Installation: {{app_path}} {{completion_command}} >> ~/.bashrc\n# or {{app_path}} {{completion_command}} >> ~/.bash_profile on OSX.\n#\n_{{app_name}}_yargs_completions()\n{\n local cur_word args type_list\n\n cur_word="${COMP_WORDS[COMP_CWORD]}"\n args=("${COMP_WORDS[@]}")\n\n # ask yargs to generate completions.\n type_list=$({{app_path}} --get-yargs-completions "${args[@]}")\n\n COMPREPLY=( $(compgen -W "${type_list}" -- ${cur_word}) )\n\n # if no match was found, fall back to filename completion\n if [ ${#COMPREPLY[@]} -eq 0 ]; then\n COMPREPLY=()\n fi\n\n return 0\n}\ncomplete -o bashdefault -o default -F _{{app_name}}_yargs_completions {{app_name}}\n###-end-{{app_name}}-completions-###\n';const i=this.shim.path.basename(t);return t.match(/\.js$/)&&(t=`./${t}`),s=s.replace(/{{app_name}}/g,i),s=s.replace(/{{completion_command}}/g,e),s.replace(/{{app_path}}/g,t)}registerFunction(t){this.customCompletionFunction=t}setParsed(t){this.aliases=t.aliases}}function N(t,e){if(0===t.length)return e.length;if(0===e.length)return t.length;const s=[];let i,n;for(i=0;i<=e.length;i++)s[i]=[i];for(n=0;n<=t.length;n++)s[0][n]=n;for(i=1;i<=e.length;i++)for(n=1;n<=t.length;n++)e.charAt(i-1)===t.charAt(n-1)?s[i][n]=s[i-1][n-1]:i>1&&n>1&&e.charAt(i-2)===t.charAt(n-1)&&e.charAt(i-1)===t.charAt(n-2)?s[i][n]=s[i-2][n-2]+1:s[i][n]=Math.min(s[i-1][n-1]+1,Math.min(s[i][n-1]+1,s[i-1][n]+1));return s[e.length][t.length]}const H=["$0","--","_"];var z,W,q,U,F,L,V,G,R,T,B,Y,K,J,Z,X,Q,tt,et,st,it,nt,rt,ot,at,ht,lt,ct,ft,dt,ut,pt,gt,mt,yt;const 
bt=Symbol("copyDoubleDash"),vt=Symbol("copyDoubleDash"),Ot=Symbol("deleteFromParserHintObject"),wt=Symbol("emitWarning"),Ct=Symbol("freeze"),jt=Symbol("getDollarZero"),Mt=Symbol("getParserConfiguration"),_t=Symbol("getUsageConfiguration"),kt=Symbol("guessLocale"),xt=Symbol("guessVersion"),Et=Symbol("parsePositionalNumbers"),At=Symbol("pkgUp"),Pt=Symbol("populateParserHintArray"),St=Symbol("populateParserHintSingleValueDictionary"),$t=Symbol("populateParserHintArrayDictionary"),It=Symbol("populateParserHintDictionary"),Dt=Symbol("sanitizeKey"),Nt=Symbol("setKey"),Ht=Symbol("unfreeze"),zt=Symbol("validateAsync"),Wt=Symbol("getCommandInstance"),qt=Symbol("getContext"),Ut=Symbol("getHasOutput"),Ft=Symbol("getLoggerInstance"),Lt=Symbol("getParseContext"),Vt=Symbol("getUsageInstance"),Gt=Symbol("getValidationInstance"),Rt=Symbol("hasParseCallback"),Tt=Symbol("isGlobalContext"),Bt=Symbol("postProcess"),Yt=Symbol("rebase"),Kt=Symbol("reset"),Jt=Symbol("runYargsParserAndExecuteCommands"),Zt=Symbol("runValidation"),Xt=Symbol("setHasOutput"),Qt=Symbol("kTrackManuallySetKeys");class te{constructor(t=[],e,s,i){this.customScriptName=!1,this.parsed=!1,z.set(this,void 0),W.set(this,void 0),q.set(this,{commands:[],fullCommands:[]}),U.set(this,null),F.set(this,null),L.set(this,"show-hidden"),V.set(this,null),G.set(this,!0),R.set(this,{}),T.set(this,!0),B.set(this,[]),Y.set(this,void 0),K.set(this,{}),J.set(this,!1),Z.set(this,null),X.set(this,!0),Q.set(this,void 0),tt.set(this,""),et.set(this,void 0),st.set(this,void 0),it.set(this,{}),nt.set(this,null),rt.set(this,null),ot.set(this,{}),at.set(this,{}),ht.set(this,void 0),lt.set(this,!1),ct.set(this,void 0),ft.set(this,!1),dt.set(this,!1),ut.set(this,!1),pt.set(this,void 0),gt.set(this,{}),mt.set(this,null),yt.set(this,void 0),O(this,ct,i,"f"),O(this,ht,t,"f"),O(this,W,e,"f"),O(this,st,s,"f"),O(this,Y,new w(this),"f"),this.$0=this[jt](),this[Kt](),O(this,z,v(this,z,"f"),"f"),O(this,pt,v(this,pt,"f"),"f"),O(this,yt,v(this,yt,"f"),"f"),O(this,et,v(this,et,"f"),"f"),v(this,et,"f").showHiddenOpt=v(this,L,"f"),O(this,Q,this[vt](),"f")}addHelpOpt(t,e){return h("[string|boolean] [string]",[t,e],arguments.length),v(this,Z,"f")&&(this[Ot](v(this,Z,"f")),O(this,Z,null,"f")),!1===t&&void 0===e||(O(this,Z,"string"==typeof t?t:"help","f"),this.boolean(v(this,Z,"f")),this.describe(v(this,Z,"f"),e||v(this,pt,"f").deferY18nLookup("Show help"))),this}help(t,e){return this.addHelpOpt(t,e)}addShowHiddenOpt(t,e){if(h("[string|boolean] [string]",[t,e],arguments.length),!1===t&&void 0===e)return this;const s="string"==typeof t?t:v(this,L,"f");return this.boolean(s),this.describe(s,e||v(this,pt,"f").deferY18nLookup("Show hidden options")),v(this,et,"f").showHiddenOpt=s,this}showHidden(t,e){return this.addShowHiddenOpt(t,e)}alias(t,e){return h(" [string|array]",[t,e],arguments.length),this[$t](this.alias.bind(this),"alias",t,e),this}array(t){return h("",[t],arguments.length),this[Pt]("array",t),this[Qt](t),this}boolean(t){return h("",[t],arguments.length),this[Pt]("boolean",t),this[Qt](t),this}check(t,e){return h(" [boolean]",[t,e],arguments.length),this.middleware(((e,s)=>j((()=>t(e,s.getOptions())),(s=>(s?("string"==typeof s||s instanceof Error)&&v(this,pt,"f").fail(s.toString(),s):v(this,pt,"f").fail(v(this,ct,"f").y18n.__("Argument check failed: %s",t.toString())),e)),(t=>(v(this,pt,"f").fail(t.message?t.message:t.toString(),t),e)))),!1,e),this}choices(t,e){return h(" 
[string|array]",[t,e],arguments.length),this[$t](this.choices.bind(this),"choices",t,e),this}coerce(t,s){if(h(" [function]",[t,s],arguments.length),Array.isArray(t)){if(!s)throw new e("coerce callback must be provided");for(const e of t)this.coerce(e,s);return this}if("object"==typeof t){for(const e of Object.keys(t))this.coerce(e,t[e]);return this}if(!s)throw new e("coerce callback must be provided");return v(this,et,"f").key[t]=!0,v(this,Y,"f").addCoerceMiddleware(((i,n)=>{let r;return Object.prototype.hasOwnProperty.call(i,t)?j((()=>(r=n.getAliases(),s(i[t]))),(e=>{i[t]=e;const s=n.getInternalMethods().getParserConfiguration()["strip-aliased"];if(r[t]&&!0!==s)for(const s of r[t])i[s]=e;return i}),(t=>{throw new e(t.message)})):i}),t),this}conflicts(t,e){return h(" [string|array]",[t,e],arguments.length),v(this,yt,"f").conflicts(t,e),this}config(t="config",e,s){return h("[object|string] [string|function] [function]",[t,e,s],arguments.length),"object"!=typeof t||Array.isArray(t)?("function"==typeof e&&(s=e,e=void 0),this.describe(t,e||v(this,pt,"f").deferY18nLookup("Path to JSON config file")),(Array.isArray(t)?t:[t]).forEach((t=>{v(this,et,"f").config[t]=s||!0})),this):(t=n(t,v(this,W,"f"),this[Mt]()["deep-merge-config"]||!1,v(this,ct,"f")),v(this,et,"f").configObjects=(v(this,et,"f").configObjects||[]).concat(t),this)}completion(t,e,s){return h("[string] [string|boolean|function] [function]",[t,e,s],arguments.length),"function"==typeof e&&(s=e,e=void 0),O(this,F,t||v(this,F,"f")||"completion","f"),e||!1===e||(e="generate completion script"),this.command(v(this,F,"f"),e),s&&v(this,U,"f").registerFunction(s),this}command(t,e,s,i,n,r){return h(" [string|boolean] [function|object] [function] [array] [boolean|string]",[t,e,s,i,n,r],arguments.length),v(this,z,"f").addHandler(t,e,s,i,n,r),this}commands(t,e,s,i,n,r){return this.command(t,e,s,i,n,r)}commandDir(t,e){h(" [object]",[t,e],arguments.length);const s=v(this,st,"f")||v(this,ct,"f").require;return v(this,z,"f").addDirectory(t,s,v(this,ct,"f").getCallerFile(),e),this}count(t){return h("",[t],arguments.length),this[Pt]("count",t),this[Qt](t),this}default(t,e,s){return h(" [*] [string]",[t,e,s],arguments.length),s&&(u(t,v(this,ct,"f")),v(this,et,"f").defaultDescription[t]=s),"function"==typeof e&&(u(t,v(this,ct,"f")),v(this,et,"f").defaultDescription[t]||(v(this,et,"f").defaultDescription[t]=v(this,pt,"f").functionDescription(e)),e=e.call()),this[St](this.default.bind(this),"default",t,e),this}defaults(t,e,s){return this.default(t,e,s)}demandCommand(t=1,e,s,i){return h("[number] [number|string] [string|null|undefined] [string|null|undefined]",[t,e,s,i],arguments.length),"number"!=typeof e&&(s=e,e=1/0),this.global("_",!1),v(this,et,"f").demandedCommands._={min:t,max:e,minMsg:s,maxMsg:i},this}demand(t,e,s){return Array.isArray(e)?(e.forEach((t=>{d(s,!0,v(this,ct,"f")),this.demandOption(t,s)})),e=1/0):"number"!=typeof e&&(s=e,e=1/0),"number"==typeof t?(d(s,!0,v(this,ct,"f")),this.demandCommand(t,e,s,s)):Array.isArray(t)?t.forEach((t=>{d(s,!0,v(this,ct,"f")),this.demandOption(t,s)})):"string"==typeof s?this.demandOption(t,s):!0!==s&&void 0!==s||this.demandOption(t),this}demandOption(t,e){return h(" [string]",[t,e],arguments.length),this[St](this.demandOption.bind(this),"demandedOptions",t,e),this}deprecateOption(t,e){return h(" [string|boolean]",[t,e],arguments.length),v(this,et,"f").deprecatedOptions[t]=e,this}describe(t,e){return h(" [string]",[t,e],arguments.length),this[Nt](t,!0),v(this,pt,"f").describe(t,e),this}detectLocale(t){return 
h("",[t],arguments.length),O(this,G,t,"f"),this}env(t){return h("[string|boolean]",[t],arguments.length),!1===t?delete v(this,et,"f").envPrefix:v(this,et,"f").envPrefix=t||"",this}epilogue(t){return h("",[t],arguments.length),v(this,pt,"f").epilog(t),this}epilog(t){return this.epilogue(t)}example(t,e){return h(" [string]",[t,e],arguments.length),Array.isArray(t)?t.forEach((t=>this.example(...t))):v(this,pt,"f").example(t,e),this}exit(t,e){O(this,J,!0,"f"),O(this,V,e,"f"),v(this,T,"f")&&v(this,ct,"f").process.exit(t)}exitProcess(t=!0){return h("[boolean]",[t],arguments.length),O(this,T,t,"f"),this}fail(t){if(h("",[t],arguments.length),"boolean"==typeof t&&!1!==t)throw new e("Invalid first argument. Expected function or boolean 'false'");return v(this,pt,"f").failFn(t),this}getAliases(){return this.parsed?this.parsed.aliases:{}}async getCompletion(t,e){return h(" [function]",[t,e],arguments.length),e?v(this,U,"f").getCompletion(t,e):new Promise(((e,s)=>{v(this,U,"f").getCompletion(t,((t,i)=>{t?s(t):e(i)}))}))}getDemandedOptions(){return h([],0),v(this,et,"f").demandedOptions}getDemandedCommands(){return h([],0),v(this,et,"f").demandedCommands}getDeprecatedOptions(){return h([],0),v(this,et,"f").deprecatedOptions}getDetectLocale(){return v(this,G,"f")}getExitProcess(){return v(this,T,"f")}getGroups(){return Object.assign({},v(this,K,"f"),v(this,at,"f"))}getHelp(){if(O(this,J,!0,"f"),!v(this,pt,"f").hasCachedHelpMessage()){if(!this.parsed){const t=this[Jt](v(this,ht,"f"),void 0,void 0,0,!0);if(f(t))return t.then((()=>v(this,pt,"f").help()))}const t=v(this,z,"f").runDefaultBuilderOn(this);if(f(t))return t.then((()=>v(this,pt,"f").help()))}return Promise.resolve(v(this,pt,"f").help())}getOptions(){return v(this,et,"f")}getStrict(){return v(this,ft,"f")}getStrictCommands(){return v(this,dt,"f")}getStrictOptions(){return v(this,ut,"f")}global(t,e){return h(" [boolean]",[t,e],arguments.length),t=[].concat(t),!1!==e?v(this,et,"f").local=v(this,et,"f").local.filter((e=>-1===t.indexOf(e))):t.forEach((t=>{v(this,et,"f").local.includes(t)||v(this,et,"f").local.push(t)})),this}group(t,e){h(" ",[t,e],arguments.length);const s=v(this,at,"f")[e]||v(this,K,"f")[e];v(this,at,"f")[e]&&delete v(this,at,"f")[e];const i={};return v(this,K,"f")[e]=(s||[]).concat(t).filter((t=>!i[t]&&(i[t]=!0))),this}hide(t){return h("",[t],arguments.length),v(this,et,"f").hiddenOptions.push(t),this}implies(t,e){return h(" [number|string|array]",[t,e],arguments.length),v(this,yt,"f").implies(t,e),this}locale(t){return h("[string]",[t],arguments.length),void 0===t?(this[kt](),v(this,ct,"f").y18n.getLocale()):(O(this,G,!1,"f"),v(this,ct,"f").y18n.setLocale(t),this)}middleware(t,e,s){return v(this,Y,"f").addMiddleware(t,!!e,s)}nargs(t,e){return h(" [number]",[t,e],arguments.length),this[St](this.nargs.bind(this),"narg",t,e),this}normalize(t){return h("",[t],arguments.length),this[Pt]("normalize",t),this}number(t){return h("",[t],arguments.length),this[Pt]("number",t),this[Qt](t),this}option(t,e){if(h(" [object]",[t,e],arguments.length),"object"==typeof t)Object.keys(t).forEach((e=>{this.options(e,t[e])}));else{"object"!=typeof e&&(e={}),this[Qt](t),!v(this,mt,"f")||"version"!==t&&"version"!==(null==e?void 0:e.alias)||this[wt](['"version" is a reserved word.',"Please do one of the following:",'- Disable version with `yargs.version(false)` if using "version" as an option',"- Use the built-in `yargs.version` method instead (if applicable)","- Use a different option key","https://yargs.js.org/docs/#api-reference-version"].join("\n"),void 
0,"versionWarning"),v(this,et,"f").key[t]=!0,e.alias&&this.alias(t,e.alias);const s=e.deprecate||e.deprecated;s&&this.deprecateOption(t,s);const i=e.demand||e.required||e.require;i&&this.demand(t,i),e.demandOption&&this.demandOption(t,"string"==typeof e.demandOption?e.demandOption:void 0),e.conflicts&&this.conflicts(t,e.conflicts),"default"in e&&this.default(t,e.default),void 0!==e.implies&&this.implies(t,e.implies),void 0!==e.nargs&&this.nargs(t,e.nargs),e.config&&this.config(t,e.configParser),e.normalize&&this.normalize(t),e.choices&&this.choices(t,e.choices),e.coerce&&this.coerce(t,e.coerce),e.group&&this.group(t,e.group),(e.boolean||"boolean"===e.type)&&(this.boolean(t),e.alias&&this.boolean(e.alias)),(e.array||"array"===e.type)&&(this.array(t),e.alias&&this.array(e.alias)),(e.number||"number"===e.type)&&(this.number(t),e.alias&&this.number(e.alias)),(e.string||"string"===e.type)&&(this.string(t),e.alias&&this.string(e.alias)),(e.count||"count"===e.type)&&this.count(t),"boolean"==typeof e.global&&this.global(t,e.global),e.defaultDescription&&(v(this,et,"f").defaultDescription[t]=e.defaultDescription),e.skipValidation&&this.skipValidation(t);const n=e.describe||e.description||e.desc,r=v(this,pt,"f").getDescriptions();Object.prototype.hasOwnProperty.call(r,t)&&"string"!=typeof n||this.describe(t,n),e.hidden&&this.hide(t),e.requiresArg&&this.requiresArg(t)}return this}options(t,e){return this.option(t,e)}parse(t,e,s){h("[string|array] [function|boolean|object] [function]",[t,e,s],arguments.length),this[Ct](),void 0===t&&(t=v(this,ht,"f")),"object"==typeof e&&(O(this,rt,e,"f"),e=s),"function"==typeof e&&(O(this,nt,e,"f"),e=!1),e||O(this,ht,t,"f"),v(this,nt,"f")&&O(this,T,!1,"f");const i=this[Jt](t,!!e),n=this.parsed;return v(this,U,"f").setParsed(this.parsed),f(i)?i.then((t=>(v(this,nt,"f")&&v(this,nt,"f").call(this,v(this,V,"f"),t,v(this,tt,"f")),t))).catch((t=>{throw v(this,nt,"f")&&v(this,nt,"f")(t,this.parsed.argv,v(this,tt,"f")),t})).finally((()=>{this[Ht](),this.parsed=n})):(v(this,nt,"f")&&v(this,nt,"f").call(this,v(this,V,"f"),i,v(this,tt,"f")),this[Ht](),this.parsed=n,i)}parseAsync(t,e,s){const i=this.parse(t,e,s);return f(i)?i:Promise.resolve(i)}parseSync(t,s,i){const n=this.parse(t,s,i);if(f(n))throw new e(".parseSync() must not be used with asynchronous builders, handlers, or middleware");return n}parserConfiguration(t){return h("",[t],arguments.length),O(this,it,t,"f"),this}pkgConf(t,e){h(" [string]",[t,e],arguments.length);let s=null;const i=this[At](e||v(this,W,"f"));return i[t]&&"object"==typeof i[t]&&(s=n(i[t],e||v(this,W,"f"),this[Mt]()["deep-merge-config"]||!1,v(this,ct,"f")),v(this,et,"f").configObjects=(v(this,et,"f").configObjects||[]).concat(s)),this}positional(t,e){h(" ",[t,e],arguments.length);const s=["default","defaultDescription","implies","normalize","choices","conflicts","coerce","type","describe","desc","description","alias"];e=g(e,((t,e)=>!("type"===t&&!["string","number","boolean"].includes(e))&&s.includes(t)));const i=v(this,q,"f").fullCommands[v(this,q,"f").fullCommands.length-1],n=i?v(this,z,"f").cmdToParseOptions(i):{array:[],alias:{},default:{},demand:{}};return p(n).forEach((s=>{const i=n[s];Array.isArray(i)?-1!==i.indexOf(t)&&(e[s]=!0):i[t]&&!(s in e)&&(e[s]=i[t])})),this.group(t,v(this,pt,"f").getPositionalGroupName()),this.option(t,e)}recommendCommands(t=!0){return h("[boolean]",[t],arguments.length),O(this,lt,t,"f"),this}required(t,e,s){return this.demand(t,e,s)}require(t,e,s){return this.demand(t,e,s)}requiresArg(t){return h(" 
[number]",[t],arguments.length),"string"==typeof t&&v(this,et,"f").narg[t]||this[St](this.requiresArg.bind(this),"narg",t,NaN),this}showCompletionScript(t,e){return h("[string] [string]",[t,e],arguments.length),t=t||this.$0,v(this,Q,"f").log(v(this,U,"f").generateCompletionScript(t,e||v(this,F,"f")||"completion")),this}showHelp(t){if(h("[string|function]",[t],arguments.length),O(this,J,!0,"f"),!v(this,pt,"f").hasCachedHelpMessage()){if(!this.parsed){const e=this[Jt](v(this,ht,"f"),void 0,void 0,0,!0);if(f(e))return e.then((()=>{v(this,pt,"f").showHelp(t)})),this}const e=v(this,z,"f").runDefaultBuilderOn(this);if(f(e))return e.then((()=>{v(this,pt,"f").showHelp(t)})),this}return v(this,pt,"f").showHelp(t),this}scriptName(t){return this.customScriptName=!0,this.$0=t,this}showHelpOnFail(t,e){return h("[boolean|string] [string]",[t,e],arguments.length),v(this,pt,"f").showHelpOnFail(t,e),this}showVersion(t){return h("[string|function]",[t],arguments.length),v(this,pt,"f").showVersion(t),this}skipValidation(t){return h("",[t],arguments.length),this[Pt]("skipValidation",t),this}strict(t){return h("[boolean]",[t],arguments.length),O(this,ft,!1!==t,"f"),this}strictCommands(t){return h("[boolean]",[t],arguments.length),O(this,dt,!1!==t,"f"),this}strictOptions(t){return h("[boolean]",[t],arguments.length),O(this,ut,!1!==t,"f"),this}string(t){return h("",[t],arguments.length),this[Pt]("string",t),this[Qt](t),this}terminalWidth(){return h([],0),v(this,ct,"f").process.stdColumns}updateLocale(t){return this.updateStrings(t)}updateStrings(t){return h("",[t],arguments.length),O(this,G,!1,"f"),v(this,ct,"f").y18n.updateLocale(t),this}usage(t,s,i,n){if(h(" [string|boolean] [function|object] [function]",[t,s,i,n],arguments.length),void 0!==s){if(d(t,null,v(this,ct,"f")),(t||"").match(/^\$0( |$)/))return this.command(t,s,i,n);throw new e(".usage() description must start with $0 if being used as alias for .command()")}return v(this,pt,"f").usage(t),this}usageConfiguration(t){return h("",[t],arguments.length),O(this,gt,t,"f"),this}version(t,e,s){const i="version";if(h("[boolean|string] [string] [string]",[t,e,s],arguments.length),v(this,mt,"f")&&(this[Ot](v(this,mt,"f")),v(this,pt,"f").version(void 0),O(this,mt,null,"f")),0===arguments.length)s=this[xt](),t=i;else if(1===arguments.length){if(!1===t)return this;s=t,t=i}else 2===arguments.length&&(s=e,e=void 0);return O(this,mt,"string"==typeof t?t:i,"f"),e=e||v(this,pt,"f").deferY18nLookup("Show version number"),v(this,pt,"f").version(s||void 0),this.boolean(v(this,mt,"f")),this.describe(v(this,mt,"f"),e),this}wrap(t){return h("",[t],arguments.length),v(this,pt,"f").wrap(t),this}[(z=new WeakMap,W=new WeakMap,q=new WeakMap,U=new WeakMap,F=new WeakMap,L=new WeakMap,V=new WeakMap,G=new WeakMap,R=new WeakMap,T=new WeakMap,B=new WeakMap,Y=new WeakMap,K=new WeakMap,J=new WeakMap,Z=new WeakMap,X=new WeakMap,Q=new WeakMap,tt=new WeakMap,et=new WeakMap,st=new WeakMap,it=new WeakMap,nt=new WeakMap,rt=new WeakMap,ot=new WeakMap,at=new WeakMap,ht=new WeakMap,lt=new WeakMap,ct=new WeakMap,ft=new WeakMap,dt=new WeakMap,ut=new WeakMap,pt=new WeakMap,gt=new WeakMap,mt=new WeakMap,yt=new WeakMap,bt)](t){if(!t._||!t["--"])return t;t._.push.apply(t._,t["--"]);try{delete t["--"]}catch(t){}return t}[vt](){return{log:(...t)=>{this[Rt]()||console.log(...t),O(this,J,!0,"f"),v(this,tt,"f").length&&O(this,tt,v(this,tt,"f")+"\n","f"),O(this,tt,v(this,tt,"f")+t.join(" 
"),"f")},error:(...t)=>{this[Rt]()||console.error(...t),O(this,J,!0,"f"),v(this,tt,"f").length&&O(this,tt,v(this,tt,"f")+"\n","f"),O(this,tt,v(this,tt,"f")+t.join(" "),"f")}}}[Ot](t){p(v(this,et,"f")).forEach((e=>{if("configObjects"===e)return;const s=v(this,et,"f")[e];Array.isArray(s)?s.includes(t)&&s.splice(s.indexOf(t),1):"object"==typeof s&&delete s[t]})),delete v(this,pt,"f").getDescriptions()[t]}[wt](t,e,s){v(this,R,"f")[s]||(v(this,ct,"f").process.emitWarning(t,e),v(this,R,"f")[s]=!0)}[Ct](){v(this,B,"f").push({options:v(this,et,"f"),configObjects:v(this,et,"f").configObjects.slice(0),exitProcess:v(this,T,"f"),groups:v(this,K,"f"),strict:v(this,ft,"f"),strictCommands:v(this,dt,"f"),strictOptions:v(this,ut,"f"),completionCommand:v(this,F,"f"),output:v(this,tt,"f"),exitError:v(this,V,"f"),hasOutput:v(this,J,"f"),parsed:this.parsed,parseFn:v(this,nt,"f"),parseContext:v(this,rt,"f")}),v(this,pt,"f").freeze(),v(this,yt,"f").freeze(),v(this,z,"f").freeze(),v(this,Y,"f").freeze()}[jt](){let t,e="";return t=/\b(node|iojs|electron)(\.exe)?$/.test(v(this,ct,"f").process.argv()[0])?v(this,ct,"f").process.argv().slice(1,2):v(this,ct,"f").process.argv().slice(0,1),e=t.map((t=>{const e=this[Yt](v(this,W,"f"),t);return t.match(/^(\/|([a-zA-Z]:)?\\)/)&&e.lengthe.includes("package.json")?"package.json":void 0));d(i,void 0,v(this,ct,"f")),s=JSON.parse(v(this,ct,"f").readFileSync(i,"utf8"))}catch(t){}return v(this,ot,"f")[e]=s||{},v(this,ot,"f")[e]}[Pt](t,e){(e=[].concat(e)).forEach((e=>{e=this[Dt](e),v(this,et,"f")[t].push(e)}))}[St](t,e,s,i){this[It](t,e,s,i,((t,e,s)=>{v(this,et,"f")[t][e]=s}))}[$t](t,e,s,i){this[It](t,e,s,i,((t,e,s)=>{v(this,et,"f")[t][e]=(v(this,et,"f")[t][e]||[]).concat(s)}))}[It](t,e,s,i,n){if(Array.isArray(s))s.forEach((e=>{t(e,i)}));else if((t=>"object"==typeof t)(s))for(const e of p(s))t(e,s[e]);else n(e,this[Dt](s),i)}[Dt](t){return"__proto__"===t?"___proto___":t}[Nt](t,e){return this[St](this[Nt].bind(this),"key",t,e),this}[Ht](){var t,e,s,i,n,r,o,a,h,l,c,f;const u=v(this,B,"f").pop();let p;d(u,void 0,v(this,ct,"f")),t=this,e=this,s=this,i=this,n=this,r=this,o=this,a=this,h=this,l=this,c=this,f=this,({options:{set value(e){O(t,et,e,"f")}}.value,configObjects:p,exitProcess:{set value(t){O(e,T,t,"f")}}.value,groups:{set value(t){O(s,K,t,"f")}}.value,output:{set value(t){O(i,tt,t,"f")}}.value,exitError:{set value(t){O(n,V,t,"f")}}.value,hasOutput:{set value(t){O(r,J,t,"f")}}.value,parsed:this.parsed,strict:{set value(t){O(o,ft,t,"f")}}.value,strictCommands:{set value(t){O(a,dt,t,"f")}}.value,strictOptions:{set value(t){O(h,ut,t,"f")}}.value,completionCommand:{set value(t){O(l,F,t,"f")}}.value,parseFn:{set value(t){O(c,nt,t,"f")}}.value,parseContext:{set value(t){O(f,rt,t,"f")}}.value}=u),v(this,et,"f").configObjects=p,v(this,pt,"f").unfreeze(),v(this,yt,"f").unfreeze(),v(this,z,"f").unfreeze(),v(this,Y,"f").unfreeze()}[zt](t,e){return j(e,(e=>(t(e),e)))}getInternalMethods(){return{getCommandInstance:this[Wt].bind(this),getContext:this[qt].bind(this),getHasOutput:this[Ut].bind(this),getLoggerInstance:this[Ft].bind(this),getParseContext:this[Lt].bind(this),getParserConfiguration:this[Mt].bind(this),getUsageConfiguration:this[_t].bind(this),getUsageInstance:this[Vt].bind(this),getValidationInstance:this[Gt].bind(this),hasParseCallback:this[Rt].bind(this),isGlobalContext:this[Tt].bind(this),postProcess:this[Bt].bind(this),reset:this[Kt].bind(this),runValidation:this[Zt].bind(this),runYargsParserAndExecuteCommands:this[Jt].bind(this),setHasOutput:this[Xt].bind(this)}}[Wt](){return 
v(this,z,"f")}[qt](){return v(this,q,"f")}[Ut](){return v(this,J,"f")}[Ft](){return v(this,Q,"f")}[Lt](){return v(this,rt,"f")||{}}[Vt](){return v(this,pt,"f")}[Gt](){return v(this,yt,"f")}[Rt](){return!!v(this,nt,"f")}[Tt](){return v(this,X,"f")}[Bt](t,e,s,i){if(s)return t;if(f(t))return t;e||(t=this[bt](t));return(this[Mt]()["parse-positional-numbers"]||void 0===this[Mt]()["parse-positional-numbers"])&&(t=this[Et](t)),i&&(t=C(t,this,v(this,Y,"f").getMiddleware(),!1)),t}[Kt](t={}){O(this,et,v(this,et,"f")||{},"f");const e={};e.local=v(this,et,"f").local||[],e.configObjects=v(this,et,"f").configObjects||[];const s={};e.local.forEach((e=>{s[e]=!0,(t[e]||[]).forEach((t=>{s[t]=!0}))})),Object.assign(v(this,at,"f"),Object.keys(v(this,K,"f")).reduce(((t,e)=>{const i=v(this,K,"f")[e].filter((t=>!(t in s)));return i.length>0&&(t[e]=i),t}),{})),O(this,K,{},"f");return["array","boolean","string","skipValidation","count","normalize","number","hiddenOptions"].forEach((t=>{e[t]=(v(this,et,"f")[t]||[]).filter((t=>!s[t]))})),["narg","key","alias","default","defaultDescription","config","choices","demandedOptions","demandedCommands","deprecatedOptions"].forEach((t=>{e[t]=g(v(this,et,"f")[t],(t=>!s[t]))})),e.envPrefix=v(this,et,"f").envPrefix,O(this,et,e,"f"),O(this,pt,v(this,pt,"f")?v(this,pt,"f").reset(s):P(this,v(this,ct,"f")),"f"),O(this,yt,v(this,yt,"f")?v(this,yt,"f").reset(s):function(t,e,s){const i=s.y18n.__,n=s.y18n.__n,r={nonOptionCount:function(s){const i=t.getDemandedCommands(),r=s._.length+(s["--"]?s["--"].length:0)-t.getInternalMethods().getContext().commands.length;i._&&(ri._.max)&&(ri._.max&&(void 0!==i._.maxMsg?e.fail(i._.maxMsg?i._.maxMsg.replace(/\$0/g,r.toString()).replace(/\$1/,i._.max.toString()):null):e.fail(n("Too many non-option arguments: got %s, maximum of %s","Too many non-option arguments: got %s, maximum of %s",r,r.toString(),i._.max.toString()))))},positionalCount:function(t,s){s{H.includes(e)||Object.prototype.hasOwnProperty.call(o,e)||Object.prototype.hasOwnProperty.call(t.getInternalMethods().getParseContext(),e)||r.isValidAndSomeAliasIsNotNew(e,i)||f.push(e)})),h&&(d.commands.length>0||c.length>0||a)&&s._.slice(d.commands.length).forEach((t=>{c.includes(""+t)||f.push(""+t)})),h){const e=(null===(l=t.getDemandedCommands()._)||void 0===l?void 0:l.max)||0,i=d.commands.length+e;i{t=String(t),d.commands.includes(t)||f.includes(t)||f.push(t)}))}f.length&&e.fail(n("Unknown argument: %s","Unknown arguments: %s",f.length,f.map((t=>t.trim()?t:`"${t}"`)).join(", ")))},unknownCommands:function(s){const i=t.getInternalMethods().getCommandInstance().getCommands(),r=[],o=t.getInternalMethods().getContext();return(o.commands.length>0||i.length>0)&&s._.slice(o.commands.length).forEach((t=>{i.includes(""+t)||r.push(""+t)})),r.length>0&&(e.fail(n("Unknown command: %s","Unknown commands: %s",r.length,r.join(", "))),!0)},isValidAndSomeAliasIsNotNew:function(e,s){if(!Object.prototype.hasOwnProperty.call(s,e))return!1;const i=t.parsed.newAliases;return[e,...s[e]].some((t=>!Object.prototype.hasOwnProperty.call(i,t)||!i[e]))},limitedChoices:function(s){const n=t.getOptions(),r={};if(!Object.keys(n.choices).length)return;Object.keys(s).forEach((t=>{-1===H.indexOf(t)&&Object.prototype.hasOwnProperty.call(n.choices,t)&&[].concat(s[t]).forEach((e=>{-1===n.choices[t].indexOf(e)&&void 0!==e&&(r[t]=(r[t]||[]).concat(e))}))}));const o=Object.keys(r);if(!o.length)return;let a=i("Invalid values:");o.forEach((t=>{a+=`\n ${i("Argument: %s, Given: %s, Choices: 
%s",t,e.stringifiedValues(r[t]),e.stringifiedValues(n.choices[t]))}`})),e.fail(a)}};let o={};function a(t,e){const s=Number(e);return"number"==typeof(e=isNaN(s)?e:s)?e=t._.length>=e:e.match(/^--no-.+/)?(e=e.match(/^--no-(.+)/)[1],e=!Object.prototype.hasOwnProperty.call(t,e)):e=Object.prototype.hasOwnProperty.call(t,e),e}r.implies=function(e,i){h(" [array|number|string]",[e,i],arguments.length),"object"==typeof e?Object.keys(e).forEach((t=>{r.implies(t,e[t])})):(t.global(e),o[e]||(o[e]=[]),Array.isArray(i)?i.forEach((t=>r.implies(e,t))):(d(i,void 0,s),o[e].push(i)))},r.getImplied=function(){return o},r.implications=function(t){const s=[];if(Object.keys(o).forEach((e=>{const i=e;(o[e]||[]).forEach((e=>{let n=i;const r=e;n=a(t,n),e=a(t,e),n&&!e&&s.push(` ${i} -> ${r}`)}))})),s.length){let t=`${i("Implications failed:")}\n`;s.forEach((e=>{t+=e})),e.fail(t)}};let l={};r.conflicts=function(e,s){h(" [array|string]",[e,s],arguments.length),"object"==typeof e?Object.keys(e).forEach((t=>{r.conflicts(t,e[t])})):(t.global(e),l[e]||(l[e]=[]),Array.isArray(s)?s.forEach((t=>r.conflicts(e,t))):l[e].push(s))},r.getConflicting=()=>l,r.conflicting=function(n){Object.keys(n).forEach((t=>{l[t]&&l[t].forEach((s=>{s&&void 0!==n[t]&&void 0!==n[s]&&e.fail(i("Arguments %s and %s are mutually exclusive",t,s))}))})),t.getInternalMethods().getParserConfiguration()["strip-dashed"]&&Object.keys(l).forEach((t=>{l[t].forEach((r=>{r&&void 0!==n[s.Parser.camelCase(t)]&&void 0!==n[s.Parser.camelCase(r)]&&e.fail(i("Arguments %s and %s are mutually exclusive",t,r))}))}))},r.recommendCommands=function(t,s){s=s.sort(((t,e)=>e.length-t.length));let n=null,r=1/0;for(let e,i=0;void 0!==(e=s[i]);i++){const s=N(t,e);s<=3&&s!t[e])),l=g(l,(e=>!t[e])),r};const c=[];return r.freeze=function(){c.push({implied:o,conflicting:l})},r.unfreeze=function(){const t=c.pop();d(t,void 0,s),({implied:o,conflicting:l}=t)},r}(this,v(this,pt,"f"),v(this,ct,"f")),"f"),O(this,z,v(this,z,"f")?v(this,z,"f").reset():function(t,e,s,i){return new _(t,e,s,i)}(v(this,pt,"f"),v(this,yt,"f"),v(this,Y,"f"),v(this,ct,"f")),"f"),v(this,U,"f")||O(this,U,function(t,e,s,i){return new D(t,e,s,i)}(this,v(this,pt,"f"),v(this,z,"f"),v(this,ct,"f")),"f"),v(this,Y,"f").reset(),O(this,F,null,"f"),O(this,tt,"","f"),O(this,V,null,"f"),O(this,J,!1,"f"),this.parsed=!1,this}[Yt](t,e){return v(this,ct,"f").path.relative(t,e)}[Jt](t,s,i,n=0,r=!1){let o=!!i||r;t=t||v(this,ht,"f"),v(this,et,"f").__=v(this,ct,"f").y18n.__,v(this,et,"f").configuration=this[Mt]();const a=!!v(this,et,"f").configuration["populate--"],h=Object.assign({},v(this,et,"f").configuration,{"populate--":!0}),l=v(this,ct,"f").Parser.detailed(t,Object.assign({},v(this,et,"f"),{configuration:{"parse-positional-numbers":!1,...h}})),c=Object.assign(l.argv,v(this,rt,"f"));let d;const u=l.aliases;let p=!1,g=!1;Object.keys(c).forEach((t=>{t===v(this,Z,"f")&&c[t]?p=!0:t===v(this,mt,"f")&&c[t]&&(g=!0)})),c.$0=this.$0,this.parsed=l,0===n&&v(this,pt,"f").clearCachedHelpMessage();try{if(this[kt](),s)return this[Bt](c,a,!!i,!1);if(v(this,Z,"f")){[v(this,Z,"f")].concat(u[v(this,Z,"f")]||[]).filter((t=>t.length>1)).includes(""+c._[c._.length-1])&&(c._.pop(),p=!0)}O(this,X,!1,"f");const h=v(this,z,"f").getCommands(),m=v(this,U,"f").completionKey in c,y=p||m||r;if(c._.length){if(h.length){let t;for(let e,s=n||0;void 0!==c._[s];s++){if(e=String(c._[s]),h.includes(e)&&e!==v(this,F,"f")){const t=v(this,z,"f").runCommand(e,this,l,s+1,r,p||g||r);return 
this[Bt](t,a,!!i,!1)}if(!t&&e!==v(this,F,"f")){t=e;break}}!v(this,z,"f").hasDefaultCommand()&&v(this,lt,"f")&&t&&!y&&v(this,yt,"f").recommendCommands(t,h)}v(this,F,"f")&&c._.includes(v(this,F,"f"))&&!m&&(v(this,T,"f")&&E(!0),this.showCompletionScript(),this.exit(0))}if(v(this,z,"f").hasDefaultCommand()&&!y){const t=v(this,z,"f").runCommand(null,this,l,0,r,p||g||r);return this[Bt](t,a,!!i,!1)}if(m){v(this,T,"f")&&E(!0);const s=(t=[].concat(t)).slice(t.indexOf(`--${v(this,U,"f").completionKey}`)+1);return v(this,U,"f").getCompletion(s,((t,s)=>{if(t)throw new e(t.message);(s||[]).forEach((t=>{v(this,Q,"f").log(t)})),this.exit(0)})),this[Bt](c,!a,!!i,!1)}if(v(this,J,"f")||(p?(v(this,T,"f")&&E(!0),o=!0,this.showHelp("log"),this.exit(0)):g&&(v(this,T,"f")&&E(!0),o=!0,v(this,pt,"f").showVersion("log"),this.exit(0))),!o&&v(this,et,"f").skipValidation.length>0&&(o=Object.keys(c).some((t=>v(this,et,"f").skipValidation.indexOf(t)>=0&&!0===c[t]))),!o){if(l.error)throw new e(l.error.message);if(!m){const t=this[Zt](u,{},l.error);i||(d=C(c,this,v(this,Y,"f").getMiddleware(),!0)),d=this[zt](t,null!=d?d:c),f(d)&&!i&&(d=d.then((()=>C(c,this,v(this,Y,"f").getMiddleware(),!1))))}}}catch(t){if(!(t instanceof e))throw t;v(this,pt,"f").fail(t.message,t)}return this[Bt](null!=d?d:c,a,!!i,!0)}[Zt](t,s,i,n){const r={...this.getDemandedOptions()};return o=>{if(i)throw new e(i.message);v(this,yt,"f").nonOptionCount(o),v(this,yt,"f").requiredArguments(o,r);let a=!1;v(this,dt,"f")&&(a=v(this,yt,"f").unknownCommands(o)),v(this,ft,"f")&&!a?v(this,yt,"f").unknownArguments(o,t,s,!!n):v(this,ut,"f")&&v(this,yt,"f").unknownArguments(o,t,{},!1,!1),v(this,yt,"f").limitedChoices(o),v(this,yt,"f").implications(o),v(this,yt,"f").conflicting(o)}}[Xt](){O(this,J,!0,"f")}[Qt](t){if("string"==typeof t)v(this,et,"f").key[t]=!0;else for(const e of t)v(this,et,"f").key[e]=!0}}var ee,se;const{readFileSync:ie}=require("fs"),{inspect:ne}=require("util"),{resolve:re}=require("path"),oe=require("y18n"),ae=require("yargs-parser");var he,le={assert:{notStrictEqual:t.notStrictEqual,strictEqual:t.strictEqual},cliui:require("cliui"),findUp:require("escalade/sync"),getEnv:t=>process.env[t],getCallerFile:require("get-caller-file"),getProcessArgvBin:y,inspect:ne,mainFilename:null!==(se=null===(ee=null===require||void 0===require?void 0:require.main)||void 0===ee?void 0:ee.filename)&&void 0!==se?se:process.cwd(),Parser:ae,path:require("path"),process:{argv:()=>process.argv,cwd:process.cwd,emitWarning:(t,e)=>process.emitWarning(t,e),execPath:()=>process.execPath,exit:t=>{process.exit(t)},nextTick:process.nextTick,stdColumns:void 0!==process.stdout.columns?process.stdout.columns:null},readFileSync:ie,require:require,requireDirectory:require("require-directory"),stringWidth:require("string-width"),y18n:oe({directory:re(__dirname,"../locales"),updateFiles:!1})};const ce=(null===(he=null===process||void 0===process?void 0:process.env)||void 0===he?void 0:he.YARGS_MIN_NODE_VERSION)?Number(process.env.YARGS_MIN_NODE_VERSION):12;if(process&&process.version){if(Number(process.version.match(/v([^.]+)/)[1]){const i=new te(t,e,s,de);return Object.defineProperty(i,"argv",{get:()=>i.parse(),enumerable:!0}),i.help(),i.version(),i}),argsert:h,isPromise:f,objFilter:g,parseCommand:o,Parser:fe,processArgv:b,YError:e};module.exports=ue; diff --git a/node_modules/yargs/build/lib/argsert.js b/node_modules/yargs/build/lib/argsert.js deleted file mode 100644 index be5b3aa..0000000 --- a/node_modules/yargs/build/lib/argsert.js +++ /dev/null @@ -1,62 +0,0 @@ -import { YError 
} from './yerror.js'; -import { parseCommand } from './parse-command.js'; -const positionName = ['first', 'second', 'third', 'fourth', 'fifth', 'sixth']; -export function argsert(arg1, arg2, arg3) { - function parseArgs() { - return typeof arg1 === 'object' - ? [{ demanded: [], optional: [] }, arg1, arg2] - : [ - parseCommand(`cmd ${arg1}`), - arg2, - arg3, - ]; - } - try { - let position = 0; - const [parsed, callerArguments, _length] = parseArgs(); - const args = [].slice.call(callerArguments); - while (args.length && args[args.length - 1] === undefined) - args.pop(); - const length = _length || args.length; - if (length < parsed.demanded.length) { - throw new YError(`Not enough arguments provided. Expected ${parsed.demanded.length} but received ${args.length}.`); - } - const totalCommands = parsed.demanded.length + parsed.optional.length; - if (length > totalCommands) { - throw new YError(`Too many arguments provided. Expected max ${totalCommands} but received ${length}.`); - } - parsed.demanded.forEach(demanded => { - const arg = args.shift(); - const observedType = guessType(arg); - const matchingTypes = demanded.cmd.filter(type => type === observedType || type === '*'); - if (matchingTypes.length === 0) - argumentTypeError(observedType, demanded.cmd, position); - position += 1; - }); - parsed.optional.forEach(optional => { - if (args.length === 0) - return; - const arg = args.shift(); - const observedType = guessType(arg); - const matchingTypes = optional.cmd.filter(type => type === observedType || type === '*'); - if (matchingTypes.length === 0) - argumentTypeError(observedType, optional.cmd, position); - position += 1; - }); - } - catch (err) { - console.warn(err.stack); - } -} -function guessType(arg) { - if (Array.isArray(arg)) { - return 'array'; - } - else if (arg === null) { - return 'null'; - } - return typeof arg; -} -function argumentTypeError(observedType, allowedTypes, position) { - throw new YError(`Invalid ${positionName[position] || 'manyith'} argument. Expected ${allowedTypes.join(' or ')} but received ${observedType}.`); -} diff --git a/node_modules/yargs/build/lib/command.js b/node_modules/yargs/build/lib/command.js deleted file mode 100644 index 47c1ed6..0000000 --- a/node_modules/yargs/build/lib/command.js +++ /dev/null @@ -1,449 +0,0 @@ -import { assertNotStrictEqual, } from './typings/common-types.js'; -import { isPromise } from './utils/is-promise.js'; -import { applyMiddleware, commandMiddlewareFactory, } from './middleware.js'; -import { parseCommand } from './parse-command.js'; -import { isYargsInstance, } from './yargs-factory.js'; -import { maybeAsyncResult } from './utils/maybe-async-result.js'; -import whichModule from './utils/which-module.js'; -const DEFAULT_MARKER = /(^\*)|(^\$0)/; -export class CommandInstance { - constructor(usage, validation, globalMiddleware, shim) { - this.requireCache = new Set(); - this.handlers = {}; - this.aliasMap = {}; - this.frozens = []; - this.shim = shim; - this.usage = usage; - this.globalMiddleware = globalMiddleware; - this.validation = validation; - } - addDirectory(dir, req, callerFile, opts) { - opts = opts || {}; - if (typeof opts.recurse !== 'boolean') - opts.recurse = false; - if (!Array.isArray(opts.extensions)) - opts.extensions = ['js']; - const parentVisit = typeof opts.visit === 'function' ? 
opts.visit : (o) => o; - opts.visit = (obj, joined, filename) => { - const visited = parentVisit(obj, joined, filename); - if (visited) { - if (this.requireCache.has(joined)) - return visited; - else - this.requireCache.add(joined); - this.addHandler(visited); - } - return visited; - }; - this.shim.requireDirectory({ require: req, filename: callerFile }, dir, opts); - } - addHandler(cmd, description, builder, handler, commandMiddleware, deprecated) { - let aliases = []; - const middlewares = commandMiddlewareFactory(commandMiddleware); - handler = handler || (() => { }); - if (Array.isArray(cmd)) { - if (isCommandAndAliases(cmd)) { - [cmd, ...aliases] = cmd; - } - else { - for (const command of cmd) { - this.addHandler(command); - } - } - } - else if (isCommandHandlerDefinition(cmd)) { - let command = Array.isArray(cmd.command) || typeof cmd.command === 'string' - ? cmd.command - : this.moduleName(cmd); - if (cmd.aliases) - command = [].concat(command).concat(cmd.aliases); - this.addHandler(command, this.extractDesc(cmd), cmd.builder, cmd.handler, cmd.middlewares, cmd.deprecated); - return; - } - else if (isCommandBuilderDefinition(builder)) { - this.addHandler([cmd].concat(aliases), description, builder.builder, builder.handler, builder.middlewares, builder.deprecated); - return; - } - if (typeof cmd === 'string') { - const parsedCommand = parseCommand(cmd); - aliases = aliases.map(alias => parseCommand(alias).cmd); - let isDefault = false; - const parsedAliases = [parsedCommand.cmd].concat(aliases).filter(c => { - if (DEFAULT_MARKER.test(c)) { - isDefault = true; - return false; - } - return true; - }); - if (parsedAliases.length === 0 && isDefault) - parsedAliases.push('$0'); - if (isDefault) { - parsedCommand.cmd = parsedAliases[0]; - aliases = parsedAliases.slice(1); - cmd = cmd.replace(DEFAULT_MARKER, parsedCommand.cmd); - } - aliases.forEach(alias => { - this.aliasMap[alias] = parsedCommand.cmd; - }); - if (description !== false) { - this.usage.command(cmd, description, isDefault, aliases, deprecated); - } - this.handlers[parsedCommand.cmd] = { - original: cmd, - description, - handler, - builder: builder || {}, - middlewares, - deprecated, - demanded: parsedCommand.demanded, - optional: parsedCommand.optional, - }; - if (isDefault) - this.defaultCommand = this.handlers[parsedCommand.cmd]; - } - } - getCommandHandlers() { - return this.handlers; - } - getCommands() { - return Object.keys(this.handlers).concat(Object.keys(this.aliasMap)); - } - hasDefaultCommand() { - return !!this.defaultCommand; - } - runCommand(command, yargs, parsed, commandIndex, helpOnly, helpOrVersionSet) { - const commandHandler = this.handlers[command] || - this.handlers[this.aliasMap[command]] || - this.defaultCommand; - const currentContext = yargs.getInternalMethods().getContext(); - const parentCommands = currentContext.commands.slice(); - const isDefaultCommand = !command; - if (command) { - currentContext.commands.push(command); - currentContext.fullCommands.push(commandHandler.original); - } - const builderResult = this.applyBuilderUpdateUsageAndParse(isDefaultCommand, commandHandler, yargs, parsed.aliases, parentCommands, commandIndex, helpOnly, helpOrVersionSet); - return isPromise(builderResult) - ? 
builderResult.then(result => this.applyMiddlewareAndGetResult(isDefaultCommand, commandHandler, result.innerArgv, currentContext, helpOnly, result.aliases, yargs)) - : this.applyMiddlewareAndGetResult(isDefaultCommand, commandHandler, builderResult.innerArgv, currentContext, helpOnly, builderResult.aliases, yargs); - } - applyBuilderUpdateUsageAndParse(isDefaultCommand, commandHandler, yargs, aliases, parentCommands, commandIndex, helpOnly, helpOrVersionSet) { - const builder = commandHandler.builder; - let innerYargs = yargs; - if (isCommandBuilderCallback(builder)) { - yargs.getInternalMethods().getUsageInstance().freeze(); - const builderOutput = builder(yargs.getInternalMethods().reset(aliases), helpOrVersionSet); - if (isPromise(builderOutput)) { - return builderOutput.then(output => { - innerYargs = isYargsInstance(output) ? output : yargs; - return this.parseAndUpdateUsage(isDefaultCommand, commandHandler, innerYargs, parentCommands, commandIndex, helpOnly); - }); - } - } - else if (isCommandBuilderOptionDefinitions(builder)) { - yargs.getInternalMethods().getUsageInstance().freeze(); - innerYargs = yargs.getInternalMethods().reset(aliases); - Object.keys(commandHandler.builder).forEach(key => { - innerYargs.option(key, builder[key]); - }); - } - return this.parseAndUpdateUsage(isDefaultCommand, commandHandler, innerYargs, parentCommands, commandIndex, helpOnly); - } - parseAndUpdateUsage(isDefaultCommand, commandHandler, innerYargs, parentCommands, commandIndex, helpOnly) { - if (isDefaultCommand) - innerYargs.getInternalMethods().getUsageInstance().unfreeze(true); - if (this.shouldUpdateUsage(innerYargs)) { - innerYargs - .getInternalMethods() - .getUsageInstance() - .usage(this.usageFromParentCommandsCommandHandler(parentCommands, commandHandler), commandHandler.description); - } - const innerArgv = innerYargs - .getInternalMethods() - .runYargsParserAndExecuteCommands(null, undefined, true, commandIndex, helpOnly); - return isPromise(innerArgv) - ? innerArgv.then(argv => ({ - aliases: innerYargs.parsed.aliases, - innerArgv: argv, - })) - : { - aliases: innerYargs.parsed.aliases, - innerArgv: innerArgv, - }; - } - shouldUpdateUsage(yargs) { - return (!yargs.getInternalMethods().getUsageInstance().getUsageDisabled() && - yargs.getInternalMethods().getUsageInstance().getUsage().length === 0); - } - usageFromParentCommandsCommandHandler(parentCommands, commandHandler) { - const c = DEFAULT_MARKER.test(commandHandler.original) - ? 
commandHandler.original.replace(DEFAULT_MARKER, '').trim() - : commandHandler.original; - const pc = parentCommands.filter(c => { - return !DEFAULT_MARKER.test(c); - }); - pc.push(c); - return `$0 ${pc.join(' ')}`; - } - handleValidationAndGetResult(isDefaultCommand, commandHandler, innerArgv, currentContext, aliases, yargs, middlewares, positionalMap) { - if (!yargs.getInternalMethods().getHasOutput()) { - const validation = yargs - .getInternalMethods() - .runValidation(aliases, positionalMap, yargs.parsed.error, isDefaultCommand); - innerArgv = maybeAsyncResult(innerArgv, result => { - validation(result); - return result; - }); - } - if (commandHandler.handler && !yargs.getInternalMethods().getHasOutput()) { - yargs.getInternalMethods().setHasOutput(); - const populateDoubleDash = !!yargs.getOptions().configuration['populate--']; - yargs - .getInternalMethods() - .postProcess(innerArgv, populateDoubleDash, false, false); - innerArgv = applyMiddleware(innerArgv, yargs, middlewares, false); - innerArgv = maybeAsyncResult(innerArgv, result => { - const handlerResult = commandHandler.handler(result); - return isPromise(handlerResult) - ? handlerResult.then(() => result) - : result; - }); - if (!isDefaultCommand) { - yargs.getInternalMethods().getUsageInstance().cacheHelpMessage(); - } - if (isPromise(innerArgv) && - !yargs.getInternalMethods().hasParseCallback()) { - innerArgv.catch(error => { - try { - yargs.getInternalMethods().getUsageInstance().fail(null, error); - } - catch (_err) { - } - }); - } - } - if (!isDefaultCommand) { - currentContext.commands.pop(); - currentContext.fullCommands.pop(); - } - return innerArgv; - } - applyMiddlewareAndGetResult(isDefaultCommand, commandHandler, innerArgv, currentContext, helpOnly, aliases, yargs) { - let positionalMap = {}; - if (helpOnly) - return innerArgv; - if (!yargs.getInternalMethods().getHasOutput()) { - positionalMap = this.populatePositionals(commandHandler, innerArgv, currentContext, yargs); - } - const middlewares = this.globalMiddleware - .getMiddleware() - .slice(0) - .concat(commandHandler.middlewares); - const maybePromiseArgv = applyMiddleware(innerArgv, yargs, middlewares, true); - return isPromise(maybePromiseArgv) - ? 
maybePromiseArgv.then(resolvedInnerArgv => this.handleValidationAndGetResult(isDefaultCommand, commandHandler, resolvedInnerArgv, currentContext, aliases, yargs, middlewares, positionalMap)) - : this.handleValidationAndGetResult(isDefaultCommand, commandHandler, maybePromiseArgv, currentContext, aliases, yargs, middlewares, positionalMap); - } - populatePositionals(commandHandler, argv, context, yargs) { - argv._ = argv._.slice(context.commands.length); - const demanded = commandHandler.demanded.slice(0); - const optional = commandHandler.optional.slice(0); - const positionalMap = {}; - this.validation.positionalCount(demanded.length, argv._.length); - while (demanded.length) { - const demand = demanded.shift(); - this.populatePositional(demand, argv, positionalMap); - } - while (optional.length) { - const maybe = optional.shift(); - this.populatePositional(maybe, argv, positionalMap); - } - argv._ = context.commands.concat(argv._.map(a => '' + a)); - this.postProcessPositionals(argv, positionalMap, this.cmdToParseOptions(commandHandler.original), yargs); - return positionalMap; - } - populatePositional(positional, argv, positionalMap) { - const cmd = positional.cmd[0]; - if (positional.variadic) { - positionalMap[cmd] = argv._.splice(0).map(String); - } - else { - if (argv._.length) - positionalMap[cmd] = [String(argv._.shift())]; - } - } - cmdToParseOptions(cmdString) { - const parseOptions = { - array: [], - default: {}, - alias: {}, - demand: {}, - }; - const parsed = parseCommand(cmdString); - parsed.demanded.forEach(d => { - const [cmd, ...aliases] = d.cmd; - if (d.variadic) { - parseOptions.array.push(cmd); - parseOptions.default[cmd] = []; - } - parseOptions.alias[cmd] = aliases; - parseOptions.demand[cmd] = true; - }); - parsed.optional.forEach(o => { - const [cmd, ...aliases] = o.cmd; - if (o.variadic) { - parseOptions.array.push(cmd); - parseOptions.default[cmd] = []; - } - parseOptions.alias[cmd] = aliases; - }); - return parseOptions; - } - postProcessPositionals(argv, positionalMap, parseOptions, yargs) { - const options = Object.assign({}, yargs.getOptions()); - options.default = Object.assign(parseOptions.default, options.default); - for (const key of Object.keys(parseOptions.alias)) { - options.alias[key] = (options.alias[key] || []).concat(parseOptions.alias[key]); - } - options.array = options.array.concat(parseOptions.array); - options.config = {}; - const unparsed = []; - Object.keys(positionalMap).forEach(key => { - positionalMap[key].map(value => { - if (options.configuration['unknown-options-as-args']) - options.key[key] = true; - unparsed.push(`--${key}`); - unparsed.push(value); - }); - }); - if (!unparsed.length) - return; - const config = Object.assign({}, options.configuration, { - 'populate--': false, - }); - const parsed = this.shim.Parser.detailed(unparsed, Object.assign({}, options, { - configuration: config, - })); - if (parsed.error) { - yargs - .getInternalMethods() - .getUsageInstance() - .fail(parsed.error.message, parsed.error); - } - else { - const positionalKeys = Object.keys(positionalMap); - Object.keys(positionalMap).forEach(key => { - positionalKeys.push(...parsed.aliases[key]); - }); - Object.keys(parsed.argv).forEach(key => { - if (positionalKeys.includes(key)) { - if (!positionalMap[key]) - positionalMap[key] = parsed.argv[key]; - if (!this.isInConfigs(yargs, key) && - !this.isDefaulted(yargs, key) && - Object.prototype.hasOwnProperty.call(argv, key) && - Object.prototype.hasOwnProperty.call(parsed.argv, key) && - (Array.isArray(argv[key]) || 
Array.isArray(parsed.argv[key]))) { - argv[key] = [].concat(argv[key], parsed.argv[key]); - } - else { - argv[key] = parsed.argv[key]; - } - } - }); - } - } - isDefaulted(yargs, key) { - const { default: defaults } = yargs.getOptions(); - return (Object.prototype.hasOwnProperty.call(defaults, key) || - Object.prototype.hasOwnProperty.call(defaults, this.shim.Parser.camelCase(key))); - } - isInConfigs(yargs, key) { - const { configObjects } = yargs.getOptions(); - return (configObjects.some(c => Object.prototype.hasOwnProperty.call(c, key)) || - configObjects.some(c => Object.prototype.hasOwnProperty.call(c, this.shim.Parser.camelCase(key)))); - } - runDefaultBuilderOn(yargs) { - if (!this.defaultCommand) - return; - if (this.shouldUpdateUsage(yargs)) { - const commandString = DEFAULT_MARKER.test(this.defaultCommand.original) - ? this.defaultCommand.original - : this.defaultCommand.original.replace(/^[^[\]<>]*/, '$0 '); - yargs - .getInternalMethods() - .getUsageInstance() - .usage(commandString, this.defaultCommand.description); - } - const builder = this.defaultCommand.builder; - if (isCommandBuilderCallback(builder)) { - return builder(yargs, true); - } - else if (!isCommandBuilderDefinition(builder)) { - Object.keys(builder).forEach(key => { - yargs.option(key, builder[key]); - }); - } - return undefined; - } - moduleName(obj) { - const mod = whichModule(obj); - if (!mod) - throw new Error(`No command name given for module: ${this.shim.inspect(obj)}`); - return this.commandFromFilename(mod.filename); - } - commandFromFilename(filename) { - return this.shim.path.basename(filename, this.shim.path.extname(filename)); - } - extractDesc({ describe, description, desc }) { - for (const test of [describe, description, desc]) { - if (typeof test === 'string' || test === false) - return test; - assertNotStrictEqual(test, true, this.shim); - } - return false; - } - freeze() { - this.frozens.push({ - handlers: this.handlers, - aliasMap: this.aliasMap, - defaultCommand: this.defaultCommand, - }); - } - unfreeze() { - const frozen = this.frozens.pop(); - assertNotStrictEqual(frozen, undefined, this.shim); - ({ - handlers: this.handlers, - aliasMap: this.aliasMap, - defaultCommand: this.defaultCommand, - } = frozen); - } - reset() { - this.handlers = {}; - this.aliasMap = {}; - this.defaultCommand = undefined; - this.requireCache = new Set(); - return this; - } -} -export function command(usage, validation, globalMiddleware, shim) { - return new CommandInstance(usage, validation, globalMiddleware, shim); -} -export function isCommandBuilderDefinition(builder) { - return (typeof builder === 'object' && - !!builder.builder && - typeof builder.handler === 'function'); -} -function isCommandAndAliases(cmd) { - return cmd.every(c => typeof c === 'string'); -} -export function isCommandBuilderCallback(builder) { - return typeof builder === 'function'; -} -function isCommandBuilderOptionDefinitions(builder) { - return typeof builder === 'object'; -} -export function isCommandHandlerDefinition(cmd) { - return typeof cmd === 'object' && !Array.isArray(cmd); -} diff --git a/node_modules/yargs/build/lib/completion-templates.js b/node_modules/yargs/build/lib/completion-templates.js deleted file mode 100644 index 2c4dcb5..0000000 --- a/node_modules/yargs/build/lib/completion-templates.js +++ /dev/null @@ -1,48 +0,0 @@ -export const completionShTemplate = `###-begin-{{app_name}}-completions-### -# -# yargs command completion script -# -# Installation: {{app_path}} {{completion_command}} >> ~/.bashrc -# or 
{{app_path}} {{completion_command}} >> ~/.bash_profile on OSX. -# -_{{app_name}}_yargs_completions() -{ - local cur_word args type_list - - cur_word="\${COMP_WORDS[COMP_CWORD]}" - args=("\${COMP_WORDS[@]}") - - # ask yargs to generate completions. - type_list=$({{app_path}} --get-yargs-completions "\${args[@]}") - - COMPREPLY=( $(compgen -W "\${type_list}" -- \${cur_word}) ) - - # if no match was found, fall back to filename completion - if [ \${#COMPREPLY[@]} -eq 0 ]; then - COMPREPLY=() - fi - - return 0 -} -complete -o bashdefault -o default -F _{{app_name}}_yargs_completions {{app_name}} -###-end-{{app_name}}-completions-### -`; -export const completionZshTemplate = `#compdef {{app_name}} -###-begin-{{app_name}}-completions-### -# -# yargs command completion script -# -# Installation: {{app_path}} {{completion_command}} >> ~/.zshrc -# or {{app_path}} {{completion_command}} >> ~/.zprofile on OSX. -# -_{{app_name}}_yargs_completions() -{ - local reply - local si=$IFS - IFS=$'\n' reply=($(COMP_CWORD="$((CURRENT-1))" COMP_LINE="$BUFFER" COMP_POINT="$CURSOR" {{app_path}} --get-yargs-completions "\${words[@]}")) - IFS=$si - _describe 'values' reply -} -compdef _{{app_name}}_yargs_completions {{app_name}} -###-end-{{app_name}}-completions-### -`; diff --git a/node_modules/yargs/build/lib/completion.js b/node_modules/yargs/build/lib/completion.js deleted file mode 100644 index cef2bbe..0000000 --- a/node_modules/yargs/build/lib/completion.js +++ /dev/null @@ -1,243 +0,0 @@ -import { isCommandBuilderCallback } from './command.js'; -import { assertNotStrictEqual } from './typings/common-types.js'; -import * as templates from './completion-templates.js'; -import { isPromise } from './utils/is-promise.js'; -import { parseCommand } from './parse-command.js'; -export class Completion { - constructor(yargs, usage, command, shim) { - var _a, _b, _c; - this.yargs = yargs; - this.usage = usage; - this.command = command; - this.shim = shim; - this.completionKey = 'get-yargs-completions'; - this.aliases = null; - this.customCompletionFunction = null; - this.indexAfterLastReset = 0; - this.zshShell = - (_c = (((_a = this.shim.getEnv('SHELL')) === null || _a === void 0 ? void 0 : _a.includes('zsh')) || - ((_b = this.shim.getEnv('ZSH_NAME')) === null || _b === void 0 ? void 0 : _b.includes('zsh')))) !== null && _c !== void 0 ? 
_c : false; - } - defaultCompletion(args, argv, current, done) { - const handlers = this.command.getCommandHandlers(); - for (let i = 0, ii = args.length; i < ii; ++i) { - if (handlers[args[i]] && handlers[args[i]].builder) { - const builder = handlers[args[i]].builder; - if (isCommandBuilderCallback(builder)) { - this.indexAfterLastReset = i + 1; - const y = this.yargs.getInternalMethods().reset(); - builder(y, true); - return y.argv; - } - } - } - const completions = []; - this.commandCompletions(completions, args, current); - this.optionCompletions(completions, args, argv, current); - this.choicesFromOptionsCompletions(completions, args, argv, current); - this.choicesFromPositionalsCompletions(completions, args, argv, current); - done(null, completions); - } - commandCompletions(completions, args, current) { - const parentCommands = this.yargs - .getInternalMethods() - .getContext().commands; - if (!current.match(/^-/) && - parentCommands[parentCommands.length - 1] !== current && - !this.previousArgHasChoices(args)) { - this.usage.getCommands().forEach(usageCommand => { - const commandName = parseCommand(usageCommand[0]).cmd; - if (args.indexOf(commandName) === -1) { - if (!this.zshShell) { - completions.push(commandName); - } - else { - const desc = usageCommand[1] || ''; - completions.push(commandName.replace(/:/g, '\\:') + ':' + desc); - } - } - }); - } - } - optionCompletions(completions, args, argv, current) { - if ((current.match(/^-/) || (current === '' && completions.length === 0)) && - !this.previousArgHasChoices(args)) { - const options = this.yargs.getOptions(); - const positionalKeys = this.yargs.getGroups()[this.usage.getPositionalGroupName()] || []; - Object.keys(options.key).forEach(key => { - const negable = !!options.configuration['boolean-negation'] && - options.boolean.includes(key); - const isPositionalKey = positionalKeys.includes(key); - if (!isPositionalKey && - !options.hiddenOptions.includes(key) && - !this.argsContainKey(args, key, negable)) { - this.completeOptionKey(key, completions, current, negable && !!options.default[key]); - } - }); - } - } - choicesFromOptionsCompletions(completions, args, argv, current) { - if (this.previousArgHasChoices(args)) { - const choices = this.getPreviousArgChoices(args); - if (choices && choices.length > 0) { - completions.push(...choices.map(c => c.replace(/:/g, '\\:'))); - } - } - } - choicesFromPositionalsCompletions(completions, args, argv, current) { - if (current === '' && - completions.length > 0 && - this.previousArgHasChoices(args)) { - return; - } - const positionalKeys = this.yargs.getGroups()[this.usage.getPositionalGroupName()] || []; - const offset = Math.max(this.indexAfterLastReset, this.yargs.getInternalMethods().getContext().commands.length + - 1); - const positionalKey = positionalKeys[argv._.length - offset - 1]; - if (!positionalKey) { - return; - } - const choices = this.yargs.getOptions().choices[positionalKey] || []; - for (const choice of choices) { - if (choice.startsWith(current)) { - completions.push(choice.replace(/:/g, '\\:')); - } - } - } - getPreviousArgChoices(args) { - if (args.length < 1) - return; - let previousArg = args[args.length - 1]; - let filter = ''; - if (!previousArg.startsWith('-') && args.length > 1) { - filter = previousArg; - previousArg = args[args.length - 2]; - } - if (!previousArg.startsWith('-')) - return; - const previousArgKey = previousArg.replace(/^-+/, ''); - const options = this.yargs.getOptions(); - const possibleAliases = [ - previousArgKey, - 
...(this.yargs.getAliases()[previousArgKey] || []), - ]; - let choices; - for (const possibleAlias of possibleAliases) { - if (Object.prototype.hasOwnProperty.call(options.key, possibleAlias) && - Array.isArray(options.choices[possibleAlias])) { - choices = options.choices[possibleAlias]; - break; - } - } - if (choices) { - return choices.filter(choice => !filter || choice.startsWith(filter)); - } - } - previousArgHasChoices(args) { - const choices = this.getPreviousArgChoices(args); - return choices !== undefined && choices.length > 0; - } - argsContainKey(args, key, negable) { - const argsContains = (s) => args.indexOf((/^[^0-9]$/.test(s) ? '-' : '--') + s) !== -1; - if (argsContains(key)) - return true; - if (negable && argsContains(`no-${key}`)) - return true; - if (this.aliases) { - for (const alias of this.aliases[key]) { - if (argsContains(alias)) - return true; - } - } - return false; - } - completeOptionKey(key, completions, current, negable) { - var _a, _b, _c, _d; - let keyWithDesc = key; - if (this.zshShell) { - const descs = this.usage.getDescriptions(); - const aliasKey = (_b = (_a = this === null || this === void 0 ? void 0 : this.aliases) === null || _a === void 0 ? void 0 : _a[key]) === null || _b === void 0 ? void 0 : _b.find(alias => { - const desc = descs[alias]; - return typeof desc === 'string' && desc.length > 0; - }); - const descFromAlias = aliasKey ? descs[aliasKey] : undefined; - const desc = (_d = (_c = descs[key]) !== null && _c !== void 0 ? _c : descFromAlias) !== null && _d !== void 0 ? _d : ''; - keyWithDesc = `${key.replace(/:/g, '\\:')}:${desc - .replace('__yargsString__:', '') - .replace(/(\r\n|\n|\r)/gm, ' ')}`; - } - const startsByTwoDashes = (s) => /^--/.test(s); - const isShortOption = (s) => /^[^0-9]$/.test(s); - const dashes = !startsByTwoDashes(current) && isShortOption(key) ? '-' : '--'; - completions.push(dashes + keyWithDesc); - if (negable) { - completions.push(dashes + 'no-' + keyWithDesc); - } - } - customCompletion(args, argv, current, done) { - assertNotStrictEqual(this.customCompletionFunction, null, this.shim); - if (isSyncCompletionFunction(this.customCompletionFunction)) { - const result = this.customCompletionFunction(current, argv); - if (isPromise(result)) { - return result - .then(list => { - this.shim.process.nextTick(() => { - done(null, list); - }); - }) - .catch(err => { - this.shim.process.nextTick(() => { - done(err, undefined); - }); - }); - } - return done(null, result); - } - else if (isFallbackCompletionFunction(this.customCompletionFunction)) { - return this.customCompletionFunction(current, argv, (onCompleted = done) => this.defaultCompletion(args, argv, current, onCompleted), completions => { - done(null, completions); - }); - } - else { - return this.customCompletionFunction(current, argv, completions => { - done(null, completions); - }); - } - } - getCompletion(args, done) { - const current = args.length ? args[args.length - 1] : ''; - const argv = this.yargs.parse(args, true); - const completionFunction = this.customCompletionFunction - ? (argv) => this.customCompletion(args, argv, current, done) - : (argv) => this.defaultCompletion(args, argv, current, done); - return isPromise(argv) - ? argv.then(completionFunction) - : completionFunction(argv); - } - generateCompletionScript($0, cmd) { - let script = this.zshShell - ? 
templates.completionZshTemplate - : templates.completionShTemplate; - const name = this.shim.path.basename($0); - if ($0.match(/\.js$/)) - $0 = `./${$0}`; - script = script.replace(/{{app_name}}/g, name); - script = script.replace(/{{completion_command}}/g, cmd); - return script.replace(/{{app_path}}/g, $0); - } - registerFunction(fn) { - this.customCompletionFunction = fn; - } - setParsed(parsed) { - this.aliases = parsed.aliases; - } -} -export function completion(yargs, usage, command, shim) { - return new Completion(yargs, usage, command, shim); -} -function isSyncCompletionFunction(completionFunction) { - return completionFunction.length < 3; -} -function isFallbackCompletionFunction(completionFunction) { - return completionFunction.length > 3; -} diff --git a/node_modules/yargs/build/lib/middleware.js b/node_modules/yargs/build/lib/middleware.js deleted file mode 100644 index 4e561a7..0000000 --- a/node_modules/yargs/build/lib/middleware.js +++ /dev/null @@ -1,88 +0,0 @@ -import { argsert } from './argsert.js'; -import { isPromise } from './utils/is-promise.js'; -export class GlobalMiddleware { - constructor(yargs) { - this.globalMiddleware = []; - this.frozens = []; - this.yargs = yargs; - } - addMiddleware(callback, applyBeforeValidation, global = true, mutates = false) { - argsert(' [boolean] [boolean] [boolean]', [callback, applyBeforeValidation, global], arguments.length); - if (Array.isArray(callback)) { - for (let i = 0; i < callback.length; i++) { - if (typeof callback[i] !== 'function') { - throw Error('middleware must be a function'); - } - const m = callback[i]; - m.applyBeforeValidation = applyBeforeValidation; - m.global = global; - } - Array.prototype.push.apply(this.globalMiddleware, callback); - } - else if (typeof callback === 'function') { - const m = callback; - m.applyBeforeValidation = applyBeforeValidation; - m.global = global; - m.mutates = mutates; - this.globalMiddleware.push(callback); - } - return this.yargs; - } - addCoerceMiddleware(callback, option) { - const aliases = this.yargs.getAliases(); - this.globalMiddleware = this.globalMiddleware.filter(m => { - const toCheck = [...(aliases[option] || []), option]; - if (!m.option) - return true; - else - return !toCheck.includes(m.option); - }); - callback.option = option; - return this.addMiddleware(callback, true, true, true); - } - getMiddleware() { - return this.globalMiddleware; - } - freeze() { - this.frozens.push([...this.globalMiddleware]); - } - unfreeze() { - const frozen = this.frozens.pop(); - if (frozen !== undefined) - this.globalMiddleware = frozen; - } - reset() { - this.globalMiddleware = this.globalMiddleware.filter(m => m.global); - } -} -export function commandMiddlewareFactory(commandMiddleware) { - if (!commandMiddleware) - return []; - return commandMiddleware.map(middleware => { - middleware.applyBeforeValidation = false; - return middleware; - }); -} -export function applyMiddleware(argv, yargs, middlewares, beforeValidation) { - return middlewares.reduce((acc, middleware) => { - if (middleware.applyBeforeValidation !== beforeValidation) { - return acc; - } - if (middleware.mutates) { - if (middleware.applied) - return acc; - middleware.applied = true; - } - if (isPromise(acc)) { - return acc - .then(initialObj => Promise.all([initialObj, middleware(initialObj, yargs)])) - .then(([initialObj, middlewareObj]) => Object.assign(initialObj, middlewareObj)); - } - else { - const result = middleware(acc, yargs); - return isPromise(result) - ? 
result.then(middlewareObj => Object.assign(acc, middlewareObj)) - : Object.assign(acc, result); - } - }, argv); -} diff --git a/node_modules/yargs/build/lib/parse-command.js b/node_modules/yargs/build/lib/parse-command.js deleted file mode 100644 index 4989f53..0000000 --- a/node_modules/yargs/build/lib/parse-command.js +++ /dev/null @@ -1,32 +0,0 @@ -export function parseCommand(cmd) { - const extraSpacesStrippedCommand = cmd.replace(/\s{2,}/g, ' '); - const splitCommand = extraSpacesStrippedCommand.split(/\s+(?![^[]*]|[^<]*>)/); - const bregex = /\.*[\][<>]/g; - const firstCommand = splitCommand.shift(); - if (!firstCommand) - throw new Error(`No command found in: ${cmd}`); - const parsedCommand = { - cmd: firstCommand.replace(bregex, ''), - demanded: [], - optional: [], - }; - splitCommand.forEach((cmd, i) => { - let variadic = false; - cmd = cmd.replace(/\s/g, ''); - if (/\.+[\]>]/.test(cmd) && i === splitCommand.length - 1) - variadic = true; - if (/^\[/.test(cmd)) { - parsedCommand.optional.push({ - cmd: cmd.replace(bregex, '').split('|'), - variadic, - }); - } - else { - parsedCommand.demanded.push({ - cmd: cmd.replace(bregex, '').split('|'), - variadic, - }); - } - }); - return parsedCommand; -} diff --git a/node_modules/yargs/build/lib/typings/common-types.js b/node_modules/yargs/build/lib/typings/common-types.js deleted file mode 100644 index 73e1773..0000000 --- a/node_modules/yargs/build/lib/typings/common-types.js +++ /dev/null @@ -1,9 +0,0 @@ -export function assertNotStrictEqual(actual, expected, shim, message) { - shim.assert.notStrictEqual(actual, expected, message); -} -export function assertSingleKey(actual, shim) { - shim.assert.strictEqual(typeof actual, 'string'); -} -export function objectKeys(object) { - return Object.keys(object); -} diff --git a/node_modules/yargs/build/lib/typings/yargs-parser-types.js b/node_modules/yargs/build/lib/typings/yargs-parser-types.js deleted file mode 100644 index cb0ff5c..0000000 --- a/node_modules/yargs/build/lib/typings/yargs-parser-types.js +++ /dev/null @@ -1 +0,0 @@ -export {}; diff --git a/node_modules/yargs/build/lib/usage.js b/node_modules/yargs/build/lib/usage.js deleted file mode 100644 index 0127c13..0000000 --- a/node_modules/yargs/build/lib/usage.js +++ /dev/null @@ -1,584 +0,0 @@ -import { objFilter } from './utils/obj-filter.js'; -import { YError } from './yerror.js'; -import setBlocking from './utils/set-blocking.js'; -function isBoolean(fail) { - return typeof fail === 'boolean'; -} -export function usage(yargs, shim) { - const __ = shim.y18n.__; - const self = {}; - const fails = []; - self.failFn = function failFn(f) { - fails.push(f); - }; - let failMessage = null; - let globalFailMessage = null; - let showHelpOnFail = true; - self.showHelpOnFail = function showHelpOnFailFn(arg1 = true, arg2) { - const [enabled, message] = typeof arg1 === 'string' ? 
[true, arg1] : [arg1, arg2]; - if (yargs.getInternalMethods().isGlobalContext()) { - globalFailMessage = message; - } - failMessage = message; - showHelpOnFail = enabled; - return self; - }; - let failureOutput = false; - self.fail = function fail(msg, err) { - const logger = yargs.getInternalMethods().getLoggerInstance(); - if (fails.length) { - for (let i = fails.length - 1; i >= 0; --i) { - const fail = fails[i]; - if (isBoolean(fail)) { - if (err) - throw err; - else if (msg) - throw Error(msg); - } - else { - fail(msg, err, self); - } - } - } - else { - if (yargs.getExitProcess()) - setBlocking(true); - if (!failureOutput) { - failureOutput = true; - if (showHelpOnFail) { - yargs.showHelp('error'); - logger.error(); - } - if (msg || err) - logger.error(msg || err); - const globalOrCommandFailMessage = failMessage || globalFailMessage; - if (globalOrCommandFailMessage) { - if (msg || err) - logger.error(''); - logger.error(globalOrCommandFailMessage); - } - } - err = err || new YError(msg); - if (yargs.getExitProcess()) { - return yargs.exit(1); - } - else if (yargs.getInternalMethods().hasParseCallback()) { - return yargs.exit(1, err); - } - else { - throw err; - } - } - }; - let usages = []; - let usageDisabled = false; - self.usage = (msg, description) => { - if (msg === null) { - usageDisabled = true; - usages = []; - return self; - } - usageDisabled = false; - usages.push([msg, description || '']); - return self; - }; - self.getUsage = () => { - return usages; - }; - self.getUsageDisabled = () => { - return usageDisabled; - }; - self.getPositionalGroupName = () => { - return __('Positionals:'); - }; - let examples = []; - self.example = (cmd, description) => { - examples.push([cmd, description || '']); - }; - let commands = []; - self.command = function command(cmd, description, isDefault, aliases, deprecated = false) { - if (isDefault) { - commands = commands.map(cmdArray => { - cmdArray[2] = false; - return cmdArray; - }); - } - commands.push([cmd, description || '', isDefault, aliases, deprecated]); - }; - self.getCommands = () => commands; - let descriptions = {}; - self.describe = function describe(keyOrKeys, desc) { - if (Array.isArray(keyOrKeys)) { - keyOrKeys.forEach(k => { - self.describe(k, desc); - }); - } - else if (typeof keyOrKeys === 'object') { - Object.keys(keyOrKeys).forEach(k => { - self.describe(k, keyOrKeys[k]); - }); - } - else { - descriptions[keyOrKeys] = desc; - } - }; - self.getDescriptions = () => descriptions; - let epilogs = []; - self.epilog = msg => { - epilogs.push(msg); - }; - let wrapSet = false; - let wrap; - self.wrap = cols => { - wrapSet = true; - wrap = cols; - }; - self.getWrap = () => { - if (shim.getEnv('YARGS_DISABLE_WRAP')) { - return null; - } - if (!wrapSet) { - wrap = windowWidth(); - wrapSet = true; - } - return wrap; - }; - const deferY18nLookupPrefix = '__yargsString__:'; - self.deferY18nLookup = str => deferY18nLookupPrefix + str; - self.help = function help() { - if (cachedHelpMessage) - return cachedHelpMessage; - normalizeAliases(); - const base$0 = yargs.customScriptName - ? 
yargs.$0 - : shim.path.basename(yargs.$0); - const demandedOptions = yargs.getDemandedOptions(); - const demandedCommands = yargs.getDemandedCommands(); - const deprecatedOptions = yargs.getDeprecatedOptions(); - const groups = yargs.getGroups(); - const options = yargs.getOptions(); - let keys = []; - keys = keys.concat(Object.keys(descriptions)); - keys = keys.concat(Object.keys(demandedOptions)); - keys = keys.concat(Object.keys(demandedCommands)); - keys = keys.concat(Object.keys(options.default)); - keys = keys.filter(filterHiddenOptions); - keys = Object.keys(keys.reduce((acc, key) => { - if (key !== '_') - acc[key] = true; - return acc; - }, {})); - const theWrap = self.getWrap(); - const ui = shim.cliui({ - width: theWrap, - wrap: !!theWrap, - }); - if (!usageDisabled) { - if (usages.length) { - usages.forEach(usage => { - ui.div({ text: `${usage[0].replace(/\$0/g, base$0)}` }); - if (usage[1]) { - ui.div({ text: `${usage[1]}`, padding: [1, 0, 0, 0] }); - } - }); - ui.div(); - } - else if (commands.length) { - let u = null; - if (demandedCommands._) { - u = `${base$0} <${__('command')}>\n`; - } - else { - u = `${base$0} [${__('command')}]\n`; - } - ui.div(`${u}`); - } - } - if (commands.length > 1 || (commands.length === 1 && !commands[0][2])) { - ui.div(__('Commands:')); - const context = yargs.getInternalMethods().getContext(); - const parentCommands = context.commands.length - ? `${context.commands.join(' ')} ` - : ''; - if (yargs.getInternalMethods().getParserConfiguration()['sort-commands'] === - true) { - commands = commands.sort((a, b) => a[0].localeCompare(b[0])); - } - const prefix = base$0 ? `${base$0} ` : ''; - commands.forEach(command => { - const commandString = `${prefix}${parentCommands}${command[0].replace(/^\$0 ?/, '')}`; - ui.span({ - text: commandString, - padding: [0, 2, 0, 2], - width: maxWidth(commands, theWrap, `${base$0}${parentCommands}`) + 4, - }, { text: command[1] }); - const hints = []; - if (command[2]) - hints.push(`[${__('default')}]`); - if (command[3] && command[3].length) { - hints.push(`[${__('aliases:')} ${command[3].join(', ')}]`); - } - if (command[4]) { - if (typeof command[4] === 'string') { - hints.push(`[${__('deprecated: %s', command[4])}]`); - } - else { - hints.push(`[${__('deprecated')}]`); - } - } - if (hints.length) { - ui.div({ - text: hints.join(' '), - padding: [0, 0, 0, 2], - align: 'right', - }); - } - else { - ui.div(); - } - }); - ui.div(); - } - const aliasKeys = (Object.keys(options.alias) || []).concat(Object.keys(yargs.parsed.newAliases) || []); - keys = keys.filter(key => !yargs.parsed.newAliases[key] && - aliasKeys.every(alias => (options.alias[alias] || []).indexOf(key) === -1)); - const defaultGroup = __('Options:'); - if (!groups[defaultGroup]) - groups[defaultGroup] = []; - addUngroupedKeys(keys, options.alias, groups, defaultGroup); - const isLongSwitch = (sw) => /^--/.test(getText(sw)); - const displayedGroups = Object.keys(groups) - .filter(groupName => groups[groupName].length > 0) - .map(groupName => { - const normalizedKeys = groups[groupName] - .filter(filterHiddenOptions) - .map(key => { - if (aliasKeys.includes(key)) - return key; - for (let i = 0, aliasKey; (aliasKey = aliasKeys[i]) !== undefined; i++) { - if ((options.alias[aliasKey] || []).includes(key)) - return aliasKey; - } - return key; - }); - return { groupName, normalizedKeys }; - }) - .filter(({ normalizedKeys }) => normalizedKeys.length > 0) - .map(({ groupName, normalizedKeys }) => { - const switches = normalizedKeys.reduce((acc, key) => { - 
acc[key] = [key] - .concat(options.alias[key] || []) - .map(sw => { - if (groupName === self.getPositionalGroupName()) - return sw; - else { - return ((/^[0-9]$/.test(sw) - ? options.boolean.includes(key) - ? '-' - : '--' - : sw.length > 1 - ? '--' - : '-') + sw); - } - }) - .sort((sw1, sw2) => isLongSwitch(sw1) === isLongSwitch(sw2) - ? 0 - : isLongSwitch(sw1) - ? 1 - : -1) - .join(', '); - return acc; - }, {}); - return { groupName, normalizedKeys, switches }; - }); - const shortSwitchesUsed = displayedGroups - .filter(({ groupName }) => groupName !== self.getPositionalGroupName()) - .some(({ normalizedKeys, switches }) => !normalizedKeys.every(key => isLongSwitch(switches[key]))); - if (shortSwitchesUsed) { - displayedGroups - .filter(({ groupName }) => groupName !== self.getPositionalGroupName()) - .forEach(({ normalizedKeys, switches }) => { - normalizedKeys.forEach(key => { - if (isLongSwitch(switches[key])) { - switches[key] = addIndentation(switches[key], '-x, '.length); - } - }); - }); - } - displayedGroups.forEach(({ groupName, normalizedKeys, switches }) => { - ui.div(groupName); - normalizedKeys.forEach(key => { - const kswitch = switches[key]; - let desc = descriptions[key] || ''; - let type = null; - if (desc.includes(deferY18nLookupPrefix)) - desc = __(desc.substring(deferY18nLookupPrefix.length)); - if (options.boolean.includes(key)) - type = `[${__('boolean')}]`; - if (options.count.includes(key)) - type = `[${__('count')}]`; - if (options.string.includes(key)) - type = `[${__('string')}]`; - if (options.normalize.includes(key)) - type = `[${__('string')}]`; - if (options.array.includes(key)) - type = `[${__('array')}]`; - if (options.number.includes(key)) - type = `[${__('number')}]`; - const deprecatedExtra = (deprecated) => typeof deprecated === 'string' - ? `[${__('deprecated: %s', deprecated)}]` - : `[${__('deprecated')}]`; - const extra = [ - key in deprecatedOptions - ? deprecatedExtra(deprecatedOptions[key]) - : null, - type, - key in demandedOptions ? `[${__('required')}]` : null, - options.choices && options.choices[key] - ? `[${__('choices:')} ${self.stringifiedValues(options.choices[key])}]` - : null, - defaultString(options.default[key], options.defaultDescription[key]), - ] - .filter(Boolean) - .join(' '); - ui.span({ - text: getText(kswitch), - padding: [0, 2, 0, 2 + getIndentation(kswitch)], - width: maxWidth(switches, theWrap) + 4, - }, desc); - const shouldHideOptionExtras = yargs.getInternalMethods().getUsageConfiguration()['hide-types'] === - true; - if (extra && !shouldHideOptionExtras) - ui.div({ text: extra, padding: [0, 0, 0, 2], align: 'right' }); - else - ui.div(); - }); - ui.div(); - }); - if (examples.length) { - ui.div(__('Examples:')); - examples.forEach(example => { - example[0] = example[0].replace(/\$0/g, base$0); - }); - examples.forEach(example => { - if (example[1] === '') { - ui.div({ - text: example[0], - padding: [0, 2, 0, 2], - }); - } - else { - ui.div({ - text: example[0], - padding: [0, 2, 0, 2], - width: maxWidth(examples, theWrap) + 4, - }, { - text: example[1], - }); - } - }); - ui.div(); - } - if (epilogs.length > 0) { - const e = epilogs - .map(epilog => epilog.replace(/\$0/g, base$0)) - .join('\n'); - ui.div(`${e}\n`); - } - return ui.toString().replace(/\s*$/, ''); - }; - function maxWidth(table, theWrap, modifier) { - let width = 0; - if (!Array.isArray(table)) { - table = Object.values(table).map(v => [v]); - } - table.forEach(v => { - width = Math.max(shim.stringWidth(modifier ? 
`${modifier} ${getText(v[0])}` : getText(v[0])) + getIndentation(v[0]), width); - }); - if (theWrap) - width = Math.min(width, parseInt((theWrap * 0.5).toString(), 10)); - return width; - } - function normalizeAliases() { - const demandedOptions = yargs.getDemandedOptions(); - const options = yargs.getOptions(); - (Object.keys(options.alias) || []).forEach(key => { - options.alias[key].forEach(alias => { - if (descriptions[alias]) - self.describe(key, descriptions[alias]); - if (alias in demandedOptions) - yargs.demandOption(key, demandedOptions[alias]); - if (options.boolean.includes(alias)) - yargs.boolean(key); - if (options.count.includes(alias)) - yargs.count(key); - if (options.string.includes(alias)) - yargs.string(key); - if (options.normalize.includes(alias)) - yargs.normalize(key); - if (options.array.includes(alias)) - yargs.array(key); - if (options.number.includes(alias)) - yargs.number(key); - }); - }); - } - let cachedHelpMessage; - self.cacheHelpMessage = function () { - cachedHelpMessage = this.help(); - }; - self.clearCachedHelpMessage = function () { - cachedHelpMessage = undefined; - }; - self.hasCachedHelpMessage = function () { - return !!cachedHelpMessage; - }; - function addUngroupedKeys(keys, aliases, groups, defaultGroup) { - let groupedKeys = []; - let toCheck = null; - Object.keys(groups).forEach(group => { - groupedKeys = groupedKeys.concat(groups[group]); - }); - keys.forEach(key => { - toCheck = [key].concat(aliases[key]); - if (!toCheck.some(k => groupedKeys.indexOf(k) !== -1)) { - groups[defaultGroup].push(key); - } - }); - return groupedKeys; - } - function filterHiddenOptions(key) { - return (yargs.getOptions().hiddenOptions.indexOf(key) < 0 || - yargs.parsed.argv[yargs.getOptions().showHiddenOpt]); - } - self.showHelp = (level) => { - const logger = yargs.getInternalMethods().getLoggerInstance(); - if (!level) - level = 'error'; - const emit = typeof level === 'function' ? level : logger[level]; - emit(self.help()); - }; - self.functionDescription = fn => { - const description = fn.name - ? shim.Parser.decamelize(fn.name, '-') - : __('generated-value'); - return ['(', description, ')'].join(''); - }; - self.stringifiedValues = function stringifiedValues(values, separator) { - let string = ''; - const sep = separator || ', '; - const array = [].concat(values); - if (!values || !array.length) - return string; - array.forEach(value => { - if (string.length) - string += sep; - string += JSON.stringify(value); - }); - return string; - }; - function defaultString(value, defaultDescription) { - let string = `[${__('default:')} `; - if (value === undefined && !defaultDescription) - return null; - if (defaultDescription) { - string += defaultDescription; - } - else { - switch (typeof value) { - case 'string': - string += `"${value}"`; - break; - case 'object': - string += JSON.stringify(value); - break; - default: - string += value; - } - } - return `${string}]`; - } - function windowWidth() { - const maxWidth = 80; - if (shim.process.stdColumns) { - return Math.min(maxWidth, shim.process.stdColumns); - } - else { - return maxWidth; - } - } - let version = null; - self.version = ver => { - version = ver; - }; - self.showVersion = level => { - const logger = yargs.getInternalMethods().getLoggerInstance(); - if (!level) - level = 'error'; - const emit = typeof level === 'function' ? 
level : logger[level]; - emit(version); - }; - self.reset = function reset(localLookup) { - failMessage = null; - failureOutput = false; - usages = []; - usageDisabled = false; - epilogs = []; - examples = []; - commands = []; - descriptions = objFilter(descriptions, k => !localLookup[k]); - return self; - }; - const frozens = []; - self.freeze = function freeze() { - frozens.push({ - failMessage, - failureOutput, - usages, - usageDisabled, - epilogs, - examples, - commands, - descriptions, - }); - }; - self.unfreeze = function unfreeze(defaultCommand = false) { - const frozen = frozens.pop(); - if (!frozen) - return; - if (defaultCommand) { - descriptions = { ...frozen.descriptions, ...descriptions }; - commands = [...frozen.commands, ...commands]; - usages = [...frozen.usages, ...usages]; - examples = [...frozen.examples, ...examples]; - epilogs = [...frozen.epilogs, ...epilogs]; - } - else { - ({ - failMessage, - failureOutput, - usages, - usageDisabled, - epilogs, - examples, - commands, - descriptions, - } = frozen); - } - }; - return self; -} -function isIndentedText(text) { - return typeof text === 'object'; -} -function addIndentation(text, indent) { - return isIndentedText(text) - ? { text: text.text, indentation: text.indentation + indent } - : { text, indentation: indent }; -} -function getIndentation(text) { - return isIndentedText(text) ? text.indentation : 0; -} -function getText(text) { - return isIndentedText(text) ? text.text : text; -} diff --git a/node_modules/yargs/build/lib/utils/apply-extends.js b/node_modules/yargs/build/lib/utils/apply-extends.js deleted file mode 100644 index 0e593b4..0000000 --- a/node_modules/yargs/build/lib/utils/apply-extends.js +++ /dev/null @@ -1,59 +0,0 @@ -import { YError } from '../yerror.js'; -let previouslyVisitedConfigs = []; -let shim; -export function applyExtends(config, cwd, mergeExtends, _shim) { - shim = _shim; - let defaultConfig = {}; - if (Object.prototype.hasOwnProperty.call(config, 'extends')) { - if (typeof config.extends !== 'string') - return defaultConfig; - const isPath = /\.json|\..*rc$/.test(config.extends); - let pathToDefault = null; - if (!isPath) { - try { - pathToDefault = require.resolve(config.extends); - } - catch (_err) { - return config; - } - } - else { - pathToDefault = getPathToDefaultConfig(cwd, config.extends); - } - checkForCircularExtends(pathToDefault); - previouslyVisitedConfigs.push(pathToDefault); - defaultConfig = isPath - ? JSON.parse(shim.readFileSync(pathToDefault, 'utf8')) - : require(config.extends); - delete config.extends; - defaultConfig = applyExtends(defaultConfig, shim.path.dirname(pathToDefault), mergeExtends, shim); - } - previouslyVisitedConfigs = []; - return mergeExtends - ? 
mergeDeep(defaultConfig, config) - : Object.assign({}, defaultConfig, config); -} -function checkForCircularExtends(cfgPath) { - if (previouslyVisitedConfigs.indexOf(cfgPath) > -1) { - throw new YError(`Circular extended configurations: '${cfgPath}'.`); - } -} -function getPathToDefaultConfig(cwd, pathToExtend) { - return shim.path.resolve(cwd, pathToExtend); -} -function mergeDeep(config1, config2) { - const target = {}; - function isObject(obj) { - return obj && typeof obj === 'object' && !Array.isArray(obj); - } - Object.assign(target, config1); - for (const key of Object.keys(config2)) { - if (isObject(config2[key]) && isObject(target[key])) { - target[key] = mergeDeep(config1[key], config2[key]); - } - else { - target[key] = config2[key]; - } - } - return target; -} diff --git a/node_modules/yargs/build/lib/utils/is-promise.js b/node_modules/yargs/build/lib/utils/is-promise.js deleted file mode 100644 index d250c08..0000000 --- a/node_modules/yargs/build/lib/utils/is-promise.js +++ /dev/null @@ -1,5 +0,0 @@ -export function isPromise(maybePromise) { - return (!!maybePromise && - !!maybePromise.then && - typeof maybePromise.then === 'function'); -} diff --git a/node_modules/yargs/build/lib/utils/levenshtein.js b/node_modules/yargs/build/lib/utils/levenshtein.js deleted file mode 100644 index 60575ef..0000000 --- a/node_modules/yargs/build/lib/utils/levenshtein.js +++ /dev/null @@ -1,34 +0,0 @@ -export function levenshtein(a, b) { - if (a.length === 0) - return b.length; - if (b.length === 0) - return a.length; - const matrix = []; - let i; - for (i = 0; i <= b.length; i++) { - matrix[i] = [i]; - } - let j; - for (j = 0; j <= a.length; j++) { - matrix[0][j] = j; - } - for (i = 1; i <= b.length; i++) { - for (j = 1; j <= a.length; j++) { - if (b.charAt(i - 1) === a.charAt(j - 1)) { - matrix[i][j] = matrix[i - 1][j - 1]; - } - else { - if (i > 1 && - j > 1 && - b.charAt(i - 2) === a.charAt(j - 1) && - b.charAt(i - 1) === a.charAt(j - 2)) { - matrix[i][j] = matrix[i - 2][j - 2] + 1; - } - else { - matrix[i][j] = Math.min(matrix[i - 1][j - 1] + 1, Math.min(matrix[i][j - 1] + 1, matrix[i - 1][j] + 1)); - } - } - } - } - return matrix[b.length][a.length]; -} diff --git a/node_modules/yargs/build/lib/utils/maybe-async-result.js b/node_modules/yargs/build/lib/utils/maybe-async-result.js deleted file mode 100644 index 8c6a40c..0000000 --- a/node_modules/yargs/build/lib/utils/maybe-async-result.js +++ /dev/null @@ -1,17 +0,0 @@ -import { isPromise } from './is-promise.js'; -export function maybeAsyncResult(getResult, resultHandler, errorHandler = (err) => { - throw err; -}) { - try { - const result = isFunction(getResult) ? getResult() : getResult; - return isPromise(result) - ? 
result.then((result) => resultHandler(result)) - : resultHandler(result); - } - catch (err) { - return errorHandler(err); - } -} -function isFunction(arg) { - return typeof arg === 'function'; -} diff --git a/node_modules/yargs/build/lib/utils/obj-filter.js b/node_modules/yargs/build/lib/utils/obj-filter.js deleted file mode 100644 index cd68ad2..0000000 --- a/node_modules/yargs/build/lib/utils/obj-filter.js +++ /dev/null @@ -1,10 +0,0 @@ -import { objectKeys } from '../typings/common-types.js'; -export function objFilter(original = {}, filter = () => true) { - const obj = {}; - objectKeys(original).forEach(key => { - if (filter(key, original[key])) { - obj[key] = original[key]; - } - }); - return obj; -} diff --git a/node_modules/yargs/build/lib/utils/process-argv.js b/node_modules/yargs/build/lib/utils/process-argv.js deleted file mode 100644 index 74dc9e4..0000000 --- a/node_modules/yargs/build/lib/utils/process-argv.js +++ /dev/null @@ -1,17 +0,0 @@ -function getProcessArgvBinIndex() { - if (isBundledElectronApp()) - return 0; - return 1; -} -function isBundledElectronApp() { - return isElectronApp() && !process.defaultApp; -} -function isElectronApp() { - return !!process.versions.electron; -} -export function hideBin(argv) { - return argv.slice(getProcessArgvBinIndex() + 1); -} -export function getProcessArgvBin() { - return process.argv[getProcessArgvBinIndex()]; -} diff --git a/node_modules/yargs/build/lib/utils/set-blocking.js b/node_modules/yargs/build/lib/utils/set-blocking.js deleted file mode 100644 index 88fb806..0000000 --- a/node_modules/yargs/build/lib/utils/set-blocking.js +++ /dev/null @@ -1,12 +0,0 @@ -export default function setBlocking(blocking) { - if (typeof process === 'undefined') - return; - [process.stdout, process.stderr].forEach(_stream => { - const stream = _stream; - if (stream._handle && - stream.isTTY && - typeof stream._handle.setBlocking === 'function') { - stream._handle.setBlocking(blocking); - } - }); -} diff --git a/node_modules/yargs/build/lib/utils/which-module.js b/node_modules/yargs/build/lib/utils/which-module.js deleted file mode 100644 index 5974e22..0000000 --- a/node_modules/yargs/build/lib/utils/which-module.js +++ /dev/null @@ -1,10 +0,0 @@ -export default function whichModule(exported) { - if (typeof require === 'undefined') - return null; - for (let i = 0, files = Object.keys(require.cache), mod; i < files.length; i++) { - mod = require.cache[files[i]]; - if (mod.exports === exported) - return mod; - } - return null; -} diff --git a/node_modules/yargs/build/lib/validation.js b/node_modules/yargs/build/lib/validation.js deleted file mode 100644 index bd2e1b8..0000000 --- a/node_modules/yargs/build/lib/validation.js +++ /dev/null @@ -1,305 +0,0 @@ -import { argsert } from './argsert.js'; -import { assertNotStrictEqual, } from './typings/common-types.js'; -import { levenshtein as distance } from './utils/levenshtein.js'; -import { objFilter } from './utils/obj-filter.js'; -const specialKeys = ['$0', '--', '_']; -export function validation(yargs, usage, shim) { - const __ = shim.y18n.__; - const __n = shim.y18n.__n; - const self = {}; - self.nonOptionCount = function nonOptionCount(argv) { - const demandedCommands = yargs.getDemandedCommands(); - const positionalCount = argv._.length + (argv['--'] ? 
argv['--'].length : 0); - const _s = positionalCount - yargs.getInternalMethods().getContext().commands.length; - if (demandedCommands._ && - (_s < demandedCommands._.min || _s > demandedCommands._.max)) { - if (_s < demandedCommands._.min) { - if (demandedCommands._.minMsg !== undefined) { - usage.fail(demandedCommands._.minMsg - ? demandedCommands._.minMsg - .replace(/\$0/g, _s.toString()) - .replace(/\$1/, demandedCommands._.min.toString()) - : null); - } - else { - usage.fail(__n('Not enough non-option arguments: got %s, need at least %s', 'Not enough non-option arguments: got %s, need at least %s', _s, _s.toString(), demandedCommands._.min.toString())); - } - } - else if (_s > demandedCommands._.max) { - if (demandedCommands._.maxMsg !== undefined) { - usage.fail(demandedCommands._.maxMsg - ? demandedCommands._.maxMsg - .replace(/\$0/g, _s.toString()) - .replace(/\$1/, demandedCommands._.max.toString()) - : null); - } - else { - usage.fail(__n('Too many non-option arguments: got %s, maximum of %s', 'Too many non-option arguments: got %s, maximum of %s', _s, _s.toString(), demandedCommands._.max.toString())); - } - } - } - }; - self.positionalCount = function positionalCount(required, observed) { - if (observed < required) { - usage.fail(__n('Not enough non-option arguments: got %s, need at least %s', 'Not enough non-option arguments: got %s, need at least %s', observed, observed + '', required + '')); - } - }; - self.requiredArguments = function requiredArguments(argv, demandedOptions) { - let missing = null; - for (const key of Object.keys(demandedOptions)) { - if (!Object.prototype.hasOwnProperty.call(argv, key) || - typeof argv[key] === 'undefined') { - missing = missing || {}; - missing[key] = demandedOptions[key]; - } - } - if (missing) { - const customMsgs = []; - for (const key of Object.keys(missing)) { - const msg = missing[key]; - if (msg && customMsgs.indexOf(msg) < 0) { - customMsgs.push(msg); - } - } - const customMsg = customMsgs.length ? `\n${customMsgs.join('\n')}` : ''; - usage.fail(__n('Missing required argument: %s', 'Missing required arguments: %s', Object.keys(missing).length, Object.keys(missing).join(', ') + customMsg)); - } - }; - self.unknownArguments = function unknownArguments(argv, aliases, positionalMap, isDefaultCommand, checkPositionals = true) { - var _a; - const commandKeys = yargs - .getInternalMethods() - .getCommandInstance() - .getCommands(); - const unknown = []; - const currentContext = yargs.getInternalMethods().getContext(); - Object.keys(argv).forEach(key => { - if (!specialKeys.includes(key) && - !Object.prototype.hasOwnProperty.call(positionalMap, key) && - !Object.prototype.hasOwnProperty.call(yargs.getInternalMethods().getParseContext(), key) && - !self.isValidAndSomeAliasIsNotNew(key, aliases)) { - unknown.push(key); - } - }); - if (checkPositionals && - (currentContext.commands.length > 0 || - commandKeys.length > 0 || - isDefaultCommand)) { - argv._.slice(currentContext.commands.length).forEach(key => { - if (!commandKeys.includes('' + key)) { - unknown.push('' + key); - } - }); - } - if (checkPositionals) { - const demandedCommands = yargs.getDemandedCommands(); - const maxNonOptDemanded = ((_a = demandedCommands._) === null || _a === void 0 ? 
void 0 : _a.max) || 0; - const expected = currentContext.commands.length + maxNonOptDemanded; - if (expected < argv._.length) { - argv._.slice(expected).forEach(key => { - key = String(key); - if (!currentContext.commands.includes(key) && - !unknown.includes(key)) { - unknown.push(key); - } - }); - } - } - if (unknown.length) { - usage.fail(__n('Unknown argument: %s', 'Unknown arguments: %s', unknown.length, unknown.map(s => (s.trim() ? s : `"${s}"`)).join(', '))); - } - }; - self.unknownCommands = function unknownCommands(argv) { - const commandKeys = yargs - .getInternalMethods() - .getCommandInstance() - .getCommands(); - const unknown = []; - const currentContext = yargs.getInternalMethods().getContext(); - if (currentContext.commands.length > 0 || commandKeys.length > 0) { - argv._.slice(currentContext.commands.length).forEach(key => { - if (!commandKeys.includes('' + key)) { - unknown.push('' + key); - } - }); - } - if (unknown.length > 0) { - usage.fail(__n('Unknown command: %s', 'Unknown commands: %s', unknown.length, unknown.join(', '))); - return true; - } - else { - return false; - } - }; - self.isValidAndSomeAliasIsNotNew = function isValidAndSomeAliasIsNotNew(key, aliases) { - if (!Object.prototype.hasOwnProperty.call(aliases, key)) { - return false; - } - const newAliases = yargs.parsed.newAliases; - return [key, ...aliases[key]].some(a => !Object.prototype.hasOwnProperty.call(newAliases, a) || !newAliases[key]); - }; - self.limitedChoices = function limitedChoices(argv) { - const options = yargs.getOptions(); - const invalid = {}; - if (!Object.keys(options.choices).length) - return; - Object.keys(argv).forEach(key => { - if (specialKeys.indexOf(key) === -1 && - Object.prototype.hasOwnProperty.call(options.choices, key)) { - [].concat(argv[key]).forEach(value => { - if (options.choices[key].indexOf(value) === -1 && - value !== undefined) { - invalid[key] = (invalid[key] || []).concat(value); - } - }); - } - }); - const invalidKeys = Object.keys(invalid); - if (!invalidKeys.length) - return; - let msg = __('Invalid values:'); - invalidKeys.forEach(key => { - msg += `\n ${__('Argument: %s, Given: %s, Choices: %s', key, usage.stringifiedValues(invalid[key]), usage.stringifiedValues(options.choices[key]))}`; - }); - usage.fail(msg); - }; - let implied = {}; - self.implies = function implies(key, value) { - argsert(' [array|number|string]', [key, value], arguments.length); - if (typeof key === 'object') { - Object.keys(key).forEach(k => { - self.implies(k, key[k]); - }); - } - else { - yargs.global(key); - if (!implied[key]) { - implied[key] = []; - } - if (Array.isArray(value)) { - value.forEach(i => self.implies(key, i)); - } - else { - assertNotStrictEqual(value, undefined, shim); - implied[key].push(value); - } - } - }; - self.getImplied = function getImplied() { - return implied; - }; - function keyExists(argv, val) { - const num = Number(val); - val = isNaN(num) ? 
val : num; - if (typeof val === 'number') { - val = argv._.length >= val; - } - else if (val.match(/^--no-.+/)) { - val = val.match(/^--no-(.+)/)[1]; - val = !Object.prototype.hasOwnProperty.call(argv, val); - } - else { - val = Object.prototype.hasOwnProperty.call(argv, val); - } - return val; - } - self.implications = function implications(argv) { - const implyFail = []; - Object.keys(implied).forEach(key => { - const origKey = key; - (implied[key] || []).forEach(value => { - let key = origKey; - const origValue = value; - key = keyExists(argv, key); - value = keyExists(argv, value); - if (key && !value) { - implyFail.push(` ${origKey} -> ${origValue}`); - } - }); - }); - if (implyFail.length) { - let msg = `${__('Implications failed:')}\n`; - implyFail.forEach(value => { - msg += value; - }); - usage.fail(msg); - } - }; - let conflicting = {}; - self.conflicts = function conflicts(key, value) { - argsert(' [array|string]', [key, value], arguments.length); - if (typeof key === 'object') { - Object.keys(key).forEach(k => { - self.conflicts(k, key[k]); - }); - } - else { - yargs.global(key); - if (!conflicting[key]) { - conflicting[key] = []; - } - if (Array.isArray(value)) { - value.forEach(i => self.conflicts(key, i)); - } - else { - conflicting[key].push(value); - } - } - }; - self.getConflicting = () => conflicting; - self.conflicting = function conflictingFn(argv) { - Object.keys(argv).forEach(key => { - if (conflicting[key]) { - conflicting[key].forEach(value => { - if (value && argv[key] !== undefined && argv[value] !== undefined) { - usage.fail(__('Arguments %s and %s are mutually exclusive', key, value)); - } - }); - } - }); - if (yargs.getInternalMethods().getParserConfiguration()['strip-dashed']) { - Object.keys(conflicting).forEach(key => { - conflicting[key].forEach(value => { - if (value && - argv[shim.Parser.camelCase(key)] !== undefined && - argv[shim.Parser.camelCase(value)] !== undefined) { - usage.fail(__('Arguments %s and %s are mutually exclusive', key, value)); - } - }); - }); - } - }; - self.recommendCommands = function recommendCommands(cmd, potentialCommands) { - const threshold = 3; - potentialCommands = potentialCommands.sort((a, b) => b.length - a.length); - let recommended = null; - let bestDistance = Infinity; - for (let i = 0, candidate; (candidate = potentialCommands[i]) !== undefined; i++) { - const d = distance(cmd, candidate); - if (d <= threshold && d < bestDistance) { - bestDistance = d; - recommended = candidate; - } - } - if (recommended) - usage.fail(__('Did you mean %s?', recommended)); - }; - self.reset = function reset(localLookup) { - implied = objFilter(implied, k => !localLookup[k]); - conflicting = objFilter(conflicting, k => !localLookup[k]); - return self; - }; - const frozens = []; - self.freeze = function freeze() { - frozens.push({ - implied, - conflicting, - }); - }; - self.unfreeze = function unfreeze() { - const frozen = frozens.pop(); - assertNotStrictEqual(frozen, undefined, shim); - ({ implied, conflicting } = frozen); - }; - return self; -} diff --git a/node_modules/yargs/build/lib/yargs-factory.js b/node_modules/yargs/build/lib/yargs-factory.js deleted file mode 100644 index c4b1d50..0000000 --- a/node_modules/yargs/build/lib/yargs-factory.js +++ /dev/null @@ -1,1512 +0,0 @@ -var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was 
defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; -}; -var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -}; -var _YargsInstance_command, _YargsInstance_cwd, _YargsInstance_context, _YargsInstance_completion, _YargsInstance_completionCommand, _YargsInstance_defaultShowHiddenOpt, _YargsInstance_exitError, _YargsInstance_detectLocale, _YargsInstance_emittedWarnings, _YargsInstance_exitProcess, _YargsInstance_frozens, _YargsInstance_globalMiddleware, _YargsInstance_groups, _YargsInstance_hasOutput, _YargsInstance_helpOpt, _YargsInstance_isGlobalContext, _YargsInstance_logger, _YargsInstance_output, _YargsInstance_options, _YargsInstance_parentRequire, _YargsInstance_parserConfig, _YargsInstance_parseFn, _YargsInstance_parseContext, _YargsInstance_pkgs, _YargsInstance_preservedGroups, _YargsInstance_processArgs, _YargsInstance_recommendCommands, _YargsInstance_shim, _YargsInstance_strict, _YargsInstance_strictCommands, _YargsInstance_strictOptions, _YargsInstance_usage, _YargsInstance_usageConfig, _YargsInstance_versionOpt, _YargsInstance_validation; -import { command as Command, } from './command.js'; -import { assertNotStrictEqual, assertSingleKey, objectKeys, } from './typings/common-types.js'; -import { YError } from './yerror.js'; -import { usage as Usage } from './usage.js'; -import { argsert } from './argsert.js'; -import { completion as Completion, } from './completion.js'; -import { validation as Validation, } from './validation.js'; -import { objFilter } from './utils/obj-filter.js'; -import { applyExtends } from './utils/apply-extends.js'; -import { applyMiddleware, GlobalMiddleware, } from './middleware.js'; -import { isPromise } from './utils/is-promise.js'; -import { maybeAsyncResult } from './utils/maybe-async-result.js'; -import setBlocking from './utils/set-blocking.js'; -export function YargsFactory(_shim) { - return (processArgs = [], cwd = _shim.process.cwd(), parentRequire) => { - const yargs = new YargsInstance(processArgs, cwd, parentRequire, _shim); - Object.defineProperty(yargs, 'argv', { - get: () => { - return yargs.parse(); - }, - enumerable: true, - }); - yargs.help(); - yargs.version(); - return yargs; - }; -} -const kCopyDoubleDash = Symbol('copyDoubleDash'); -const kCreateLogger = Symbol('copyDoubleDash'); -const kDeleteFromParserHintObject = Symbol('deleteFromParserHintObject'); -const kEmitWarning = Symbol('emitWarning'); -const kFreeze = Symbol('freeze'); -const kGetDollarZero = Symbol('getDollarZero'); -const kGetParserConfiguration = Symbol('getParserConfiguration'); -const kGetUsageConfiguration = Symbol('getUsageConfiguration'); -const kGuessLocale = Symbol('guessLocale'); -const kGuessVersion = Symbol('guessVersion'); -const kParsePositionalNumbers = Symbol('parsePositionalNumbers'); -const kPkgUp = Symbol('pkgUp'); -const kPopulateParserHintArray = 
Symbol('populateParserHintArray'); -const kPopulateParserHintSingleValueDictionary = Symbol('populateParserHintSingleValueDictionary'); -const kPopulateParserHintArrayDictionary = Symbol('populateParserHintArrayDictionary'); -const kPopulateParserHintDictionary = Symbol('populateParserHintDictionary'); -const kSanitizeKey = Symbol('sanitizeKey'); -const kSetKey = Symbol('setKey'); -const kUnfreeze = Symbol('unfreeze'); -const kValidateAsync = Symbol('validateAsync'); -const kGetCommandInstance = Symbol('getCommandInstance'); -const kGetContext = Symbol('getContext'); -const kGetHasOutput = Symbol('getHasOutput'); -const kGetLoggerInstance = Symbol('getLoggerInstance'); -const kGetParseContext = Symbol('getParseContext'); -const kGetUsageInstance = Symbol('getUsageInstance'); -const kGetValidationInstance = Symbol('getValidationInstance'); -const kHasParseCallback = Symbol('hasParseCallback'); -const kIsGlobalContext = Symbol('isGlobalContext'); -const kPostProcess = Symbol('postProcess'); -const kRebase = Symbol('rebase'); -const kReset = Symbol('reset'); -const kRunYargsParserAndExecuteCommands = Symbol('runYargsParserAndExecuteCommands'); -const kRunValidation = Symbol('runValidation'); -const kSetHasOutput = Symbol('setHasOutput'); -const kTrackManuallySetKeys = Symbol('kTrackManuallySetKeys'); -export class YargsInstance { - constructor(processArgs = [], cwd, parentRequire, shim) { - this.customScriptName = false; - this.parsed = false; - _YargsInstance_command.set(this, void 0); - _YargsInstance_cwd.set(this, void 0); - _YargsInstance_context.set(this, { commands: [], fullCommands: [] }); - _YargsInstance_completion.set(this, null); - _YargsInstance_completionCommand.set(this, null); - _YargsInstance_defaultShowHiddenOpt.set(this, 'show-hidden'); - _YargsInstance_exitError.set(this, null); - _YargsInstance_detectLocale.set(this, true); - _YargsInstance_emittedWarnings.set(this, {}); - _YargsInstance_exitProcess.set(this, true); - _YargsInstance_frozens.set(this, []); - _YargsInstance_globalMiddleware.set(this, void 0); - _YargsInstance_groups.set(this, {}); - _YargsInstance_hasOutput.set(this, false); - _YargsInstance_helpOpt.set(this, null); - _YargsInstance_isGlobalContext.set(this, true); - _YargsInstance_logger.set(this, void 0); - _YargsInstance_output.set(this, ''); - _YargsInstance_options.set(this, void 0); - _YargsInstance_parentRequire.set(this, void 0); - _YargsInstance_parserConfig.set(this, {}); - _YargsInstance_parseFn.set(this, null); - _YargsInstance_parseContext.set(this, null); - _YargsInstance_pkgs.set(this, {}); - _YargsInstance_preservedGroups.set(this, {}); - _YargsInstance_processArgs.set(this, void 0); - _YargsInstance_recommendCommands.set(this, false); - _YargsInstance_shim.set(this, void 0); - _YargsInstance_strict.set(this, false); - _YargsInstance_strictCommands.set(this, false); - _YargsInstance_strictOptions.set(this, false); - _YargsInstance_usage.set(this, void 0); - _YargsInstance_usageConfig.set(this, {}); - _YargsInstance_versionOpt.set(this, null); - _YargsInstance_validation.set(this, void 0); - __classPrivateFieldSet(this, _YargsInstance_shim, shim, "f"); - __classPrivateFieldSet(this, _YargsInstance_processArgs, processArgs, "f"); - __classPrivateFieldSet(this, _YargsInstance_cwd, cwd, "f"); - __classPrivateFieldSet(this, _YargsInstance_parentRequire, parentRequire, "f"); - __classPrivateFieldSet(this, _YargsInstance_globalMiddleware, new GlobalMiddleware(this), "f"); - this.$0 = this[kGetDollarZero](); - this[kReset](); - 
__classPrivateFieldSet(this, _YargsInstance_command, __classPrivateFieldGet(this, _YargsInstance_command, "f"), "f"); - __classPrivateFieldSet(this, _YargsInstance_usage, __classPrivateFieldGet(this, _YargsInstance_usage, "f"), "f"); - __classPrivateFieldSet(this, _YargsInstance_validation, __classPrivateFieldGet(this, _YargsInstance_validation, "f"), "f"); - __classPrivateFieldSet(this, _YargsInstance_options, __classPrivateFieldGet(this, _YargsInstance_options, "f"), "f"); - __classPrivateFieldGet(this, _YargsInstance_options, "f").showHiddenOpt = __classPrivateFieldGet(this, _YargsInstance_defaultShowHiddenOpt, "f"); - __classPrivateFieldSet(this, _YargsInstance_logger, this[kCreateLogger](), "f"); - } - addHelpOpt(opt, msg) { - const defaultHelpOpt = 'help'; - argsert('[string|boolean] [string]', [opt, msg], arguments.length); - if (__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")) { - this[kDeleteFromParserHintObject](__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")); - __classPrivateFieldSet(this, _YargsInstance_helpOpt, null, "f"); - } - if (opt === false && msg === undefined) - return this; - __classPrivateFieldSet(this, _YargsInstance_helpOpt, typeof opt === 'string' ? opt : defaultHelpOpt, "f"); - this.boolean(__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")); - this.describe(__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f"), msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Show help')); - return this; - } - help(opt, msg) { - return this.addHelpOpt(opt, msg); - } - addShowHiddenOpt(opt, msg) { - argsert('[string|boolean] [string]', [opt, msg], arguments.length); - if (opt === false && msg === undefined) - return this; - const showHiddenOpt = typeof opt === 'string' ? opt : __classPrivateFieldGet(this, _YargsInstance_defaultShowHiddenOpt, "f"); - this.boolean(showHiddenOpt); - this.describe(showHiddenOpt, msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Show hidden options')); - __classPrivateFieldGet(this, _YargsInstance_options, "f").showHiddenOpt = showHiddenOpt; - return this; - } - showHidden(opt, msg) { - return this.addShowHiddenOpt(opt, msg); - } - alias(key, value) { - argsert(' [string|array]', [key, value], arguments.length); - this[kPopulateParserHintArrayDictionary](this.alias.bind(this), 'alias', key, value); - return this; - } - array(keys) { - argsert('', [keys], arguments.length); - this[kPopulateParserHintArray]('array', keys); - this[kTrackManuallySetKeys](keys); - return this; - } - boolean(keys) { - argsert('', [keys], arguments.length); - this[kPopulateParserHintArray]('boolean', keys); - this[kTrackManuallySetKeys](keys); - return this; - } - check(f, global) { - argsert(' [boolean]', [f, global], arguments.length); - this.middleware((argv, _yargs) => { - return maybeAsyncResult(() => { - return f(argv, _yargs.getOptions()); - }, (result) => { - if (!result) { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(__classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.__('Argument check failed: %s', f.toString())); - } - else if (typeof result === 'string' || result instanceof Error) { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(result.toString(), result); - } - return argv; - }, (err) => { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(err.message ? 
err.message : err.toString(), err); - return argv; - }); - }, false, global); - return this; - } - choices(key, value) { - argsert(' [string|array]', [key, value], arguments.length); - this[kPopulateParserHintArrayDictionary](this.choices.bind(this), 'choices', key, value); - return this; - } - coerce(keys, value) { - argsert(' [function]', [keys, value], arguments.length); - if (Array.isArray(keys)) { - if (!value) { - throw new YError('coerce callback must be provided'); - } - for (const key of keys) { - this.coerce(key, value); - } - return this; - } - else if (typeof keys === 'object') { - for (const key of Object.keys(keys)) { - this.coerce(key, keys[key]); - } - return this; - } - if (!value) { - throw new YError('coerce callback must be provided'); - } - __classPrivateFieldGet(this, _YargsInstance_options, "f").key[keys] = true; - __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").addCoerceMiddleware((argv, yargs) => { - let aliases; - const shouldCoerce = Object.prototype.hasOwnProperty.call(argv, keys); - if (!shouldCoerce) { - return argv; - } - return maybeAsyncResult(() => { - aliases = yargs.getAliases(); - return value(argv[keys]); - }, (result) => { - argv[keys] = result; - const stripAliased = yargs - .getInternalMethods() - .getParserConfiguration()['strip-aliased']; - if (aliases[keys] && stripAliased !== true) { - for (const alias of aliases[keys]) { - argv[alias] = result; - } - } - return argv; - }, (err) => { - throw new YError(err.message); - }); - }, keys); - return this; - } - conflicts(key1, key2) { - argsert(' [string|array]', [key1, key2], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").conflicts(key1, key2); - return this; - } - config(key = 'config', msg, parseFn) { - argsert('[object|string] [string|function] [function]', [key, msg, parseFn], arguments.length); - if (typeof key === 'object' && !Array.isArray(key)) { - key = applyExtends(key, __classPrivateFieldGet(this, _YargsInstance_cwd, "f"), this[kGetParserConfiguration]()['deep-merge-config'] || false, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects = (__classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects || []).concat(key); - return this; - } - if (typeof msg === 'function') { - parseFn = msg; - msg = undefined; - } - this.describe(key, msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Path to JSON config file')); - (Array.isArray(key) ? 
key : [key]).forEach(k => { - __classPrivateFieldGet(this, _YargsInstance_options, "f").config[k] = parseFn || true; - }); - return this; - } - completion(cmd, desc, fn) { - argsert('[string] [string|boolean|function] [function]', [cmd, desc, fn], arguments.length); - if (typeof desc === 'function') { - fn = desc; - desc = undefined; - } - __classPrivateFieldSet(this, _YargsInstance_completionCommand, cmd || __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f") || 'completion', "f"); - if (!desc && desc !== false) { - desc = 'generate completion script'; - } - this.command(__classPrivateFieldGet(this, _YargsInstance_completionCommand, "f"), desc); - if (fn) - __classPrivateFieldGet(this, _YargsInstance_completion, "f").registerFunction(fn); - return this; - } - command(cmd, description, builder, handler, middlewares, deprecated) { - argsert(' [string|boolean] [function|object] [function] [array] [boolean|string]', [cmd, description, builder, handler, middlewares, deprecated], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_command, "f").addHandler(cmd, description, builder, handler, middlewares, deprecated); - return this; - } - commands(cmd, description, builder, handler, middlewares, deprecated) { - return this.command(cmd, description, builder, handler, middlewares, deprecated); - } - commandDir(dir, opts) { - argsert(' [object]', [dir, opts], arguments.length); - const req = __classPrivateFieldGet(this, _YargsInstance_parentRequire, "f") || __classPrivateFieldGet(this, _YargsInstance_shim, "f").require; - __classPrivateFieldGet(this, _YargsInstance_command, "f").addDirectory(dir, req, __classPrivateFieldGet(this, _YargsInstance_shim, "f").getCallerFile(), opts); - return this; - } - count(keys) { - argsert('', [keys], arguments.length); - this[kPopulateParserHintArray]('count', keys); - this[kTrackManuallySetKeys](keys); - return this; - } - default(key, value, defaultDescription) { - argsert(' [*] [string]', [key, value, defaultDescription], arguments.length); - if (defaultDescription) { - assertSingleKey(key, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - __classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key] = defaultDescription; - } - if (typeof value === 'function') { - assertSingleKey(key, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - if (!__classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key]) - __classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key] = - __classPrivateFieldGet(this, _YargsInstance_usage, "f").functionDescription(value); - value = value.call(); - } - this[kPopulateParserHintSingleValueDictionary](this.default.bind(this), 'default', key, value); - return this; - } - defaults(key, value, defaultDescription) { - return this.default(key, value, defaultDescription); - } - demandCommand(min = 1, max, minMsg, maxMsg) { - argsert('[number] [number|string] [string|null|undefined] [string|null|undefined]', [min, max, minMsg, maxMsg], arguments.length); - if (typeof max !== 'number') { - minMsg = max; - max = Infinity; - } - this.global('_', false); - __classPrivateFieldGet(this, _YargsInstance_options, "f").demandedCommands._ = { - min, - max, - minMsg, - maxMsg, - }; - return this; - } - demand(keys, max, msg) { - if (Array.isArray(max)) { - max.forEach(key => { - assertNotStrictEqual(msg, true, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - this.demandOption(key, msg); - }); - max = Infinity; - } - else if (typeof max !== 
'number') { - msg = max; - max = Infinity; - } - if (typeof keys === 'number') { - assertNotStrictEqual(msg, true, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - this.demandCommand(keys, max, msg, msg); - } - else if (Array.isArray(keys)) { - keys.forEach(key => { - assertNotStrictEqual(msg, true, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - this.demandOption(key, msg); - }); - } - else { - if (typeof msg === 'string') { - this.demandOption(keys, msg); - } - else if (msg === true || typeof msg === 'undefined') { - this.demandOption(keys); - } - } - return this; - } - demandOption(keys, msg) { - argsert(' [string]', [keys, msg], arguments.length); - this[kPopulateParserHintSingleValueDictionary](this.demandOption.bind(this), 'demandedOptions', keys, msg); - return this; - } - deprecateOption(option, message) { - argsert(' [string|boolean]', [option, message], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_options, "f").deprecatedOptions[option] = message; - return this; - } - describe(keys, description) { - argsert(' [string]', [keys, description], arguments.length); - this[kSetKey](keys, true); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").describe(keys, description); - return this; - } - detectLocale(detect) { - argsert('', [detect], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_detectLocale, detect, "f"); - return this; - } - env(prefix) { - argsert('[string|boolean]', [prefix], arguments.length); - if (prefix === false) - delete __classPrivateFieldGet(this, _YargsInstance_options, "f").envPrefix; - else - __classPrivateFieldGet(this, _YargsInstance_options, "f").envPrefix = prefix || ''; - return this; - } - epilogue(msg) { - argsert('', [msg], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").epilog(msg); - return this; - } - epilog(msg) { - return this.epilogue(msg); - } - example(cmd, description) { - argsert(' [string]', [cmd, description], arguments.length); - if (Array.isArray(cmd)) { - cmd.forEach(exampleParams => this.example(...exampleParams)); - } - else { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").example(cmd, description); - } - return this; - } - exit(code, err) { - __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); - __classPrivateFieldSet(this, _YargsInstance_exitError, err, "f"); - if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) - __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.exit(code); - } - exitProcess(enabled = true) { - argsert('[boolean]', [enabled], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_exitProcess, enabled, "f"); - return this; - } - fail(f) { - argsert('', [f], arguments.length); - if (typeof f === 'boolean' && f !== false) { - throw new YError("Invalid first argument. Expected function or boolean 'false'"); - } - __classPrivateFieldGet(this, _YargsInstance_usage, "f").failFn(f); - return this; - } - getAliases() { - return this.parsed ? 
this.parsed.aliases : {}; - } - async getCompletion(args, done) { - argsert(' [function]', [args, done], arguments.length); - if (!done) { - return new Promise((resolve, reject) => { - __classPrivateFieldGet(this, _YargsInstance_completion, "f").getCompletion(args, (err, completions) => { - if (err) - reject(err); - else - resolve(completions); - }); - }); - } - else { - return __classPrivateFieldGet(this, _YargsInstance_completion, "f").getCompletion(args, done); - } - } - getDemandedOptions() { - argsert([], 0); - return __classPrivateFieldGet(this, _YargsInstance_options, "f").demandedOptions; - } - getDemandedCommands() { - argsert([], 0); - return __classPrivateFieldGet(this, _YargsInstance_options, "f").demandedCommands; - } - getDeprecatedOptions() { - argsert([], 0); - return __classPrivateFieldGet(this, _YargsInstance_options, "f").deprecatedOptions; - } - getDetectLocale() { - return __classPrivateFieldGet(this, _YargsInstance_detectLocale, "f"); - } - getExitProcess() { - return __classPrivateFieldGet(this, _YargsInstance_exitProcess, "f"); - } - getGroups() { - return Object.assign({}, __classPrivateFieldGet(this, _YargsInstance_groups, "f"), __classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")); - } - getHelp() { - __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); - if (!__classPrivateFieldGet(this, _YargsInstance_usage, "f").hasCachedHelpMessage()) { - if (!this.parsed) { - const parse = this[kRunYargsParserAndExecuteCommands](__classPrivateFieldGet(this, _YargsInstance_processArgs, "f"), undefined, undefined, 0, true); - if (isPromise(parse)) { - return parse.then(() => { - return __classPrivateFieldGet(this, _YargsInstance_usage, "f").help(); - }); - } - } - const builderResponse = __classPrivateFieldGet(this, _YargsInstance_command, "f").runDefaultBuilderOn(this); - if (isPromise(builderResponse)) { - return builderResponse.then(() => { - return __classPrivateFieldGet(this, _YargsInstance_usage, "f").help(); - }); - } - } - return Promise.resolve(__classPrivateFieldGet(this, _YargsInstance_usage, "f").help()); - } - getOptions() { - return __classPrivateFieldGet(this, _YargsInstance_options, "f"); - } - getStrict() { - return __classPrivateFieldGet(this, _YargsInstance_strict, "f"); - } - getStrictCommands() { - return __classPrivateFieldGet(this, _YargsInstance_strictCommands, "f"); - } - getStrictOptions() { - return __classPrivateFieldGet(this, _YargsInstance_strictOptions, "f"); - } - global(globals, global) { - argsert(' [boolean]', [globals, global], arguments.length); - globals = [].concat(globals); - if (global !== false) { - __classPrivateFieldGet(this, _YargsInstance_options, "f").local = __classPrivateFieldGet(this, _YargsInstance_options, "f").local.filter(l => globals.indexOf(l) === -1); - } - else { - globals.forEach(g => { - if (!__classPrivateFieldGet(this, _YargsInstance_options, "f").local.includes(g)) - __classPrivateFieldGet(this, _YargsInstance_options, "f").local.push(g); - }); - } - return this; - } - group(opts, groupName) { - argsert(' ', [opts, groupName], arguments.length); - const existing = __classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")[groupName] || __classPrivateFieldGet(this, _YargsInstance_groups, "f")[groupName]; - if (__classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")[groupName]) { - delete __classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")[groupName]; - } - const seen = {}; - __classPrivateFieldGet(this, _YargsInstance_groups, "f")[groupName] = (existing || 
[]).concat(opts).filter(key => { - if (seen[key]) - return false; - return (seen[key] = true); - }); - return this; - } - hide(key) { - argsert('', [key], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_options, "f").hiddenOptions.push(key); - return this; - } - implies(key, value) { - argsert(' [number|string|array]', [key, value], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").implies(key, value); - return this; - } - locale(locale) { - argsert('[string]', [locale], arguments.length); - if (locale === undefined) { - this[kGuessLocale](); - return __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.getLocale(); - } - __classPrivateFieldSet(this, _YargsInstance_detectLocale, false, "f"); - __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.setLocale(locale); - return this; - } - middleware(callback, applyBeforeValidation, global) { - return __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").addMiddleware(callback, !!applyBeforeValidation, global); - } - nargs(key, value) { - argsert(' [number]', [key, value], arguments.length); - this[kPopulateParserHintSingleValueDictionary](this.nargs.bind(this), 'narg', key, value); - return this; - } - normalize(keys) { - argsert('', [keys], arguments.length); - this[kPopulateParserHintArray]('normalize', keys); - return this; - } - number(keys) { - argsert('', [keys], arguments.length); - this[kPopulateParserHintArray]('number', keys); - this[kTrackManuallySetKeys](keys); - return this; - } - option(key, opt) { - argsert(' [object]', [key, opt], arguments.length); - if (typeof key === 'object') { - Object.keys(key).forEach(k => { - this.options(k, key[k]); - }); - } - else { - if (typeof opt !== 'object') { - opt = {}; - } - this[kTrackManuallySetKeys](key); - if (__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f") && (key === 'version' || (opt === null || opt === void 0 ? void 0 : opt.alias) === 'version')) { - this[kEmitWarning]([ - '"version" is a reserved word.', - 'Please do one of the following:', - '- Disable version with `yargs.version(false)` if using "version" as an option', - '- Use the built-in `yargs.version` method instead (if applicable)', - '- Use a different option key', - 'https://yargs.js.org/docs/#api-reference-version', - ].join('\n'), undefined, 'versionWarning'); - } - __classPrivateFieldGet(this, _YargsInstance_options, "f").key[key] = true; - if (opt.alias) - this.alias(key, opt.alias); - const deprecate = opt.deprecate || opt.deprecated; - if (deprecate) { - this.deprecateOption(key, deprecate); - } - const demand = opt.demand || opt.required || opt.require; - if (demand) { - this.demand(key, demand); - } - if (opt.demandOption) { - this.demandOption(key, typeof opt.demandOption === 'string' ? 
opt.demandOption : undefined); - } - if (opt.conflicts) { - this.conflicts(key, opt.conflicts); - } - if ('default' in opt) { - this.default(key, opt.default); - } - if (opt.implies !== undefined) { - this.implies(key, opt.implies); - } - if (opt.nargs !== undefined) { - this.nargs(key, opt.nargs); - } - if (opt.config) { - this.config(key, opt.configParser); - } - if (opt.normalize) { - this.normalize(key); - } - if (opt.choices) { - this.choices(key, opt.choices); - } - if (opt.coerce) { - this.coerce(key, opt.coerce); - } - if (opt.group) { - this.group(key, opt.group); - } - if (opt.boolean || opt.type === 'boolean') { - this.boolean(key); - if (opt.alias) - this.boolean(opt.alias); - } - if (opt.array || opt.type === 'array') { - this.array(key); - if (opt.alias) - this.array(opt.alias); - } - if (opt.number || opt.type === 'number') { - this.number(key); - if (opt.alias) - this.number(opt.alias); - } - if (opt.string || opt.type === 'string') { - this.string(key); - if (opt.alias) - this.string(opt.alias); - } - if (opt.count || opt.type === 'count') { - this.count(key); - } - if (typeof opt.global === 'boolean') { - this.global(key, opt.global); - } - if (opt.defaultDescription) { - __classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key] = opt.defaultDescription; - } - if (opt.skipValidation) { - this.skipValidation(key); - } - const desc = opt.describe || opt.description || opt.desc; - const descriptions = __classPrivateFieldGet(this, _YargsInstance_usage, "f").getDescriptions(); - if (!Object.prototype.hasOwnProperty.call(descriptions, key) || - typeof desc === 'string') { - this.describe(key, desc); - } - if (opt.hidden) { - this.hide(key); - } - if (opt.requiresArg) { - this.requiresArg(key); - } - } - return this; - } - options(key, opt) { - return this.option(key, opt); - } - parse(args, shortCircuit, _parseFn) { - argsert('[string|array] [function|boolean|object] [function]', [args, shortCircuit, _parseFn], arguments.length); - this[kFreeze](); - if (typeof args === 'undefined') { - args = __classPrivateFieldGet(this, _YargsInstance_processArgs, "f"); - } - if (typeof shortCircuit === 'object') { - __classPrivateFieldSet(this, _YargsInstance_parseContext, shortCircuit, "f"); - shortCircuit = _parseFn; - } - if (typeof shortCircuit === 'function') { - __classPrivateFieldSet(this, _YargsInstance_parseFn, shortCircuit, "f"); - shortCircuit = false; - } - if (!shortCircuit) - __classPrivateFieldSet(this, _YargsInstance_processArgs, args, "f"); - if (__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) - __classPrivateFieldSet(this, _YargsInstance_exitProcess, false, "f"); - const parsed = this[kRunYargsParserAndExecuteCommands](args, !!shortCircuit); - const tmpParsed = this.parsed; - __classPrivateFieldGet(this, _YargsInstance_completion, "f").setParsed(this.parsed); - if (isPromise(parsed)) { - return parsed - .then(argv => { - if (__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) - __classPrivateFieldGet(this, _YargsInstance_parseFn, "f").call(this, __classPrivateFieldGet(this, _YargsInstance_exitError, "f"), argv, __classPrivateFieldGet(this, _YargsInstance_output, "f")); - return argv; - }) - .catch(err => { - if (__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) { - __classPrivateFieldGet(this, _YargsInstance_parseFn, "f")(err, this.parsed.argv, __classPrivateFieldGet(this, _YargsInstance_output, "f")); - } - throw err; - }) - .finally(() => { - this[kUnfreeze](); - this.parsed = tmpParsed; - }); - } - else { - if 
(__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) - __classPrivateFieldGet(this, _YargsInstance_parseFn, "f").call(this, __classPrivateFieldGet(this, _YargsInstance_exitError, "f"), parsed, __classPrivateFieldGet(this, _YargsInstance_output, "f")); - this[kUnfreeze](); - this.parsed = tmpParsed; - } - return parsed; - } - parseAsync(args, shortCircuit, _parseFn) { - const maybePromise = this.parse(args, shortCircuit, _parseFn); - return !isPromise(maybePromise) - ? Promise.resolve(maybePromise) - : maybePromise; - } - parseSync(args, shortCircuit, _parseFn) { - const maybePromise = this.parse(args, shortCircuit, _parseFn); - if (isPromise(maybePromise)) { - throw new YError('.parseSync() must not be used with asynchronous builders, handlers, or middleware'); - } - return maybePromise; - } - parserConfiguration(config) { - argsert('', [config], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_parserConfig, config, "f"); - return this; - } - pkgConf(key, rootPath) { - argsert(' [string]', [key, rootPath], arguments.length); - let conf = null; - const obj = this[kPkgUp](rootPath || __classPrivateFieldGet(this, _YargsInstance_cwd, "f")); - if (obj[key] && typeof obj[key] === 'object') { - conf = applyExtends(obj[key], rootPath || __classPrivateFieldGet(this, _YargsInstance_cwd, "f"), this[kGetParserConfiguration]()['deep-merge-config'] || false, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects = (__classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects || []).concat(conf); - } - return this; - } - positional(key, opts) { - argsert(' ', [key, opts], arguments.length); - const supportedOpts = [ - 'default', - 'defaultDescription', - 'implies', - 'normalize', - 'choices', - 'conflicts', - 'coerce', - 'type', - 'describe', - 'desc', - 'description', - 'alias', - ]; - opts = objFilter(opts, (k, v) => { - if (k === 'type' && !['string', 'number', 'boolean'].includes(v)) - return false; - return supportedOpts.includes(k); - }); - const fullCommand = __classPrivateFieldGet(this, _YargsInstance_context, "f").fullCommands[__classPrivateFieldGet(this, _YargsInstance_context, "f").fullCommands.length - 1]; - const parseOptions = fullCommand - ? 
__classPrivateFieldGet(this, _YargsInstance_command, "f").cmdToParseOptions(fullCommand) - : { - array: [], - alias: {}, - default: {}, - demand: {}, - }; - objectKeys(parseOptions).forEach(pk => { - const parseOption = parseOptions[pk]; - if (Array.isArray(parseOption)) { - if (parseOption.indexOf(key) !== -1) - opts[pk] = true; - } - else { - if (parseOption[key] && !(pk in opts)) - opts[pk] = parseOption[key]; - } - }); - this.group(key, __classPrivateFieldGet(this, _YargsInstance_usage, "f").getPositionalGroupName()); - return this.option(key, opts); - } - recommendCommands(recommend = true) { - argsert('[boolean]', [recommend], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_recommendCommands, recommend, "f"); - return this; - } - required(keys, max, msg) { - return this.demand(keys, max, msg); - } - require(keys, max, msg) { - return this.demand(keys, max, msg); - } - requiresArg(keys) { - argsert(' [number]', [keys], arguments.length); - if (typeof keys === 'string' && __classPrivateFieldGet(this, _YargsInstance_options, "f").narg[keys]) { - return this; - } - else { - this[kPopulateParserHintSingleValueDictionary](this.requiresArg.bind(this), 'narg', keys, NaN); - } - return this; - } - showCompletionScript($0, cmd) { - argsert('[string] [string]', [$0, cmd], arguments.length); - $0 = $0 || this.$0; - __classPrivateFieldGet(this, _YargsInstance_logger, "f").log(__classPrivateFieldGet(this, _YargsInstance_completion, "f").generateCompletionScript($0, cmd || __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f") || 'completion')); - return this; - } - showHelp(level) { - argsert('[string|function]', [level], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); - if (!__classPrivateFieldGet(this, _YargsInstance_usage, "f").hasCachedHelpMessage()) { - if (!this.parsed) { - const parse = this[kRunYargsParserAndExecuteCommands](__classPrivateFieldGet(this, _YargsInstance_processArgs, "f"), undefined, undefined, 0, true); - if (isPromise(parse)) { - parse.then(() => { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelp(level); - }); - return this; - } - } - const builderResponse = __classPrivateFieldGet(this, _YargsInstance_command, "f").runDefaultBuilderOn(this); - if (isPromise(builderResponse)) { - builderResponse.then(() => { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelp(level); - }); - return this; - } - } - __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelp(level); - return this; - } - scriptName(scriptName) { - this.customScriptName = true; - this.$0 = scriptName; - return this; - } - showHelpOnFail(enabled, message) { - argsert('[boolean|string] [string]', [enabled, message], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelpOnFail(enabled, message); - return this; - } - showVersion(level) { - argsert('[string|function]', [level], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").showVersion(level); - return this; - } - skipValidation(keys) { - argsert('', [keys], arguments.length); - this[kPopulateParserHintArray]('skipValidation', keys); - return this; - } - strict(enabled) { - argsert('[boolean]', [enabled], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_strict, enabled !== false, "f"); - return this; - } - strictCommands(enabled) { - argsert('[boolean]', [enabled], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_strictCommands, enabled !== false, "f"); - return this; - 
} - strictOptions(enabled) { - argsert('[boolean]', [enabled], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_strictOptions, enabled !== false, "f"); - return this; - } - string(keys) { - argsert('', [keys], arguments.length); - this[kPopulateParserHintArray]('string', keys); - this[kTrackManuallySetKeys](keys); - return this; - } - terminalWidth() { - argsert([], 0); - return __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.stdColumns; - } - updateLocale(obj) { - return this.updateStrings(obj); - } - updateStrings(obj) { - argsert('', [obj], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_detectLocale, false, "f"); - __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.updateLocale(obj); - return this; - } - usage(msg, description, builder, handler) { - argsert(' [string|boolean] [function|object] [function]', [msg, description, builder, handler], arguments.length); - if (description !== undefined) { - assertNotStrictEqual(msg, null, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - if ((msg || '').match(/^\$0( |$)/)) { - return this.command(msg, description, builder, handler); - } - else { - throw new YError('.usage() description must start with $0 if being used as alias for .command()'); - } - } - else { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").usage(msg); - return this; - } - } - usageConfiguration(config) { - argsert('', [config], arguments.length); - __classPrivateFieldSet(this, _YargsInstance_usageConfig, config, "f"); - return this; - } - version(opt, msg, ver) { - const defaultVersionOpt = 'version'; - argsert('[boolean|string] [string] [string]', [opt, msg, ver], arguments.length); - if (__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f")) { - this[kDeleteFromParserHintObject](__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f")); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").version(undefined); - __classPrivateFieldSet(this, _YargsInstance_versionOpt, null, "f"); - } - if (arguments.length === 0) { - ver = this[kGuessVersion](); - opt = defaultVersionOpt; - } - else if (arguments.length === 1) { - if (opt === false) { - return this; - } - ver = opt; - opt = defaultVersionOpt; - } - else if (arguments.length === 2) { - ver = msg; - msg = undefined; - } - __classPrivateFieldSet(this, _YargsInstance_versionOpt, typeof opt === 'string' ? 
opt : defaultVersionOpt, "f"); - msg = msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Show version number'); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").version(ver || undefined); - this.boolean(__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f")); - this.describe(__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f"), msg); - return this; - } - wrap(cols) { - argsert('', [cols], arguments.length); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").wrap(cols); - return this; - } - [(_YargsInstance_command = new WeakMap(), _YargsInstance_cwd = new WeakMap(), _YargsInstance_context = new WeakMap(), _YargsInstance_completion = new WeakMap(), _YargsInstance_completionCommand = new WeakMap(), _YargsInstance_defaultShowHiddenOpt = new WeakMap(), _YargsInstance_exitError = new WeakMap(), _YargsInstance_detectLocale = new WeakMap(), _YargsInstance_emittedWarnings = new WeakMap(), _YargsInstance_exitProcess = new WeakMap(), _YargsInstance_frozens = new WeakMap(), _YargsInstance_globalMiddleware = new WeakMap(), _YargsInstance_groups = new WeakMap(), _YargsInstance_hasOutput = new WeakMap(), _YargsInstance_helpOpt = new WeakMap(), _YargsInstance_isGlobalContext = new WeakMap(), _YargsInstance_logger = new WeakMap(), _YargsInstance_output = new WeakMap(), _YargsInstance_options = new WeakMap(), _YargsInstance_parentRequire = new WeakMap(), _YargsInstance_parserConfig = new WeakMap(), _YargsInstance_parseFn = new WeakMap(), _YargsInstance_parseContext = new WeakMap(), _YargsInstance_pkgs = new WeakMap(), _YargsInstance_preservedGroups = new WeakMap(), _YargsInstance_processArgs = new WeakMap(), _YargsInstance_recommendCommands = new WeakMap(), _YargsInstance_shim = new WeakMap(), _YargsInstance_strict = new WeakMap(), _YargsInstance_strictCommands = new WeakMap(), _YargsInstance_strictOptions = new WeakMap(), _YargsInstance_usage = new WeakMap(), _YargsInstance_usageConfig = new WeakMap(), _YargsInstance_versionOpt = new WeakMap(), _YargsInstance_validation = new WeakMap(), kCopyDoubleDash)](argv) { - if (!argv._ || !argv['--']) - return argv; - argv._.push.apply(argv._, argv['--']); - try { - delete argv['--']; - } - catch (_err) { } - return argv; - } - [kCreateLogger]() { - return { - log: (...args) => { - if (!this[kHasParseCallback]()) - console.log(...args); - __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); - if (__classPrivateFieldGet(this, _YargsInstance_output, "f").length) - __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + '\n', "f"); - __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + args.join(' '), "f"); - }, - error: (...args) => { - if (!this[kHasParseCallback]()) - console.error(...args); - __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); - if (__classPrivateFieldGet(this, _YargsInstance_output, "f").length) - __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + '\n', "f"); - __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + args.join(' '), "f"); - }, - }; - } - [kDeleteFromParserHintObject](optionKey) { - objectKeys(__classPrivateFieldGet(this, _YargsInstance_options, "f")).forEach((hintKey) => { - if (((key) => key === 'configObjects')(hintKey)) - return; - const hint = __classPrivateFieldGet(this, _YargsInstance_options, "f")[hintKey]; - if 
(Array.isArray(hint)) { - if (hint.includes(optionKey)) - hint.splice(hint.indexOf(optionKey), 1); - } - else if (typeof hint === 'object') { - delete hint[optionKey]; - } - }); - delete __classPrivateFieldGet(this, _YargsInstance_usage, "f").getDescriptions()[optionKey]; - } - [kEmitWarning](warning, type, deduplicationId) { - if (!__classPrivateFieldGet(this, _YargsInstance_emittedWarnings, "f")[deduplicationId]) { - __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.emitWarning(warning, type); - __classPrivateFieldGet(this, _YargsInstance_emittedWarnings, "f")[deduplicationId] = true; - } - } - [kFreeze]() { - __classPrivateFieldGet(this, _YargsInstance_frozens, "f").push({ - options: __classPrivateFieldGet(this, _YargsInstance_options, "f"), - configObjects: __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects.slice(0), - exitProcess: __classPrivateFieldGet(this, _YargsInstance_exitProcess, "f"), - groups: __classPrivateFieldGet(this, _YargsInstance_groups, "f"), - strict: __classPrivateFieldGet(this, _YargsInstance_strict, "f"), - strictCommands: __classPrivateFieldGet(this, _YargsInstance_strictCommands, "f"), - strictOptions: __classPrivateFieldGet(this, _YargsInstance_strictOptions, "f"), - completionCommand: __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f"), - output: __classPrivateFieldGet(this, _YargsInstance_output, "f"), - exitError: __classPrivateFieldGet(this, _YargsInstance_exitError, "f"), - hasOutput: __classPrivateFieldGet(this, _YargsInstance_hasOutput, "f"), - parsed: this.parsed, - parseFn: __classPrivateFieldGet(this, _YargsInstance_parseFn, "f"), - parseContext: __classPrivateFieldGet(this, _YargsInstance_parseContext, "f"), - }); - __classPrivateFieldGet(this, _YargsInstance_usage, "f").freeze(); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").freeze(); - __classPrivateFieldGet(this, _YargsInstance_command, "f").freeze(); - __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").freeze(); - } - [kGetDollarZero]() { - let $0 = ''; - let default$0; - if (/\b(node|iojs|electron)(\.exe)?$/.test(__classPrivateFieldGet(this, _YargsInstance_shim, "f").process.argv()[0])) { - default$0 = __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.argv().slice(1, 2); - } - else { - default$0 = __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.argv().slice(0, 1); - } - $0 = default$0 - .map(x => { - const b = this[kRebase](__classPrivateFieldGet(this, _YargsInstance_cwd, "f"), x); - return x.match(/^(\/|([a-zA-Z]:)?\\)/) && b.length < x.length ? 
b : x; - }) - .join(' ') - .trim(); - if (__classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('_') && - __classPrivateFieldGet(this, _YargsInstance_shim, "f").getProcessArgvBin() === __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('_')) { - $0 = __classPrivateFieldGet(this, _YargsInstance_shim, "f") - .getEnv('_') - .replace(`${__classPrivateFieldGet(this, _YargsInstance_shim, "f").path.dirname(__classPrivateFieldGet(this, _YargsInstance_shim, "f").process.execPath())}/`, ''); - } - return $0; - } - [kGetParserConfiguration]() { - return __classPrivateFieldGet(this, _YargsInstance_parserConfig, "f"); - } - [kGetUsageConfiguration]() { - return __classPrivateFieldGet(this, _YargsInstance_usageConfig, "f"); - } - [kGuessLocale]() { - if (!__classPrivateFieldGet(this, _YargsInstance_detectLocale, "f")) - return; - const locale = __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LC_ALL') || - __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LC_MESSAGES') || - __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LANG') || - __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LANGUAGE') || - 'en_US'; - this.locale(locale.replace(/[.:].*/, '')); - } - [kGuessVersion]() { - const obj = this[kPkgUp](); - return obj.version || 'unknown'; - } - [kParsePositionalNumbers](argv) { - const args = argv['--'] ? argv['--'] : argv._; - for (let i = 0, arg; (arg = args[i]) !== undefined; i++) { - if (__classPrivateFieldGet(this, _YargsInstance_shim, "f").Parser.looksLikeNumber(arg) && - Number.isSafeInteger(Math.floor(parseFloat(`${arg}`)))) { - args[i] = Number(arg); - } - } - return argv; - } - [kPkgUp](rootPath) { - const npath = rootPath || '*'; - if (__classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath]) - return __classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath]; - let obj = {}; - try { - let startDir = rootPath || __classPrivateFieldGet(this, _YargsInstance_shim, "f").mainFilename; - if (!rootPath && __classPrivateFieldGet(this, _YargsInstance_shim, "f").path.extname(startDir)) { - startDir = __classPrivateFieldGet(this, _YargsInstance_shim, "f").path.dirname(startDir); - } - const pkgJsonPath = __classPrivateFieldGet(this, _YargsInstance_shim, "f").findUp(startDir, (dir, names) => { - if (names.includes('package.json')) { - return 'package.json'; - } - else { - return undefined; - } - }); - assertNotStrictEqual(pkgJsonPath, undefined, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - obj = JSON.parse(__classPrivateFieldGet(this, _YargsInstance_shim, "f").readFileSync(pkgJsonPath, 'utf8')); - } - catch (_noop) { } - __classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath] = obj || {}; - return __classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath]; - } - [kPopulateParserHintArray](type, keys) { - keys = [].concat(keys); - keys.forEach(key => { - key = this[kSanitizeKey](key); - __classPrivateFieldGet(this, _YargsInstance_options, "f")[type].push(key); - }); - } - [kPopulateParserHintSingleValueDictionary](builder, type, key, value) { - this[kPopulateParserHintDictionary](builder, type, key, value, (type, key, value) => { - __classPrivateFieldGet(this, _YargsInstance_options, "f")[type][key] = value; - }); - } - [kPopulateParserHintArrayDictionary](builder, type, key, value) { - this[kPopulateParserHintDictionary](builder, type, key, value, (type, key, value) => { - __classPrivateFieldGet(this, _YargsInstance_options, "f")[type][key] = (__classPrivateFieldGet(this, _YargsInstance_options, 
"f")[type][key] || []).concat(value); - }); - } - [kPopulateParserHintDictionary](builder, type, key, value, singleKeyHandler) { - if (Array.isArray(key)) { - key.forEach(k => { - builder(k, value); - }); - } - else if (((key) => typeof key === 'object')(key)) { - for (const k of objectKeys(key)) { - builder(k, key[k]); - } - } - else { - singleKeyHandler(type, this[kSanitizeKey](key), value); - } - } - [kSanitizeKey](key) { - if (key === '__proto__') - return '___proto___'; - return key; - } - [kSetKey](key, set) { - this[kPopulateParserHintSingleValueDictionary](this[kSetKey].bind(this), 'key', key, set); - return this; - } - [kUnfreeze]() { - var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m; - const frozen = __classPrivateFieldGet(this, _YargsInstance_frozens, "f").pop(); - assertNotStrictEqual(frozen, undefined, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); - let configObjects; - (_a = this, _b = this, _c = this, _d = this, _e = this, _f = this, _g = this, _h = this, _j = this, _k = this, _l = this, _m = this, { - options: ({ set value(_o) { __classPrivateFieldSet(_a, _YargsInstance_options, _o, "f"); } }).value, - configObjects, - exitProcess: ({ set value(_o) { __classPrivateFieldSet(_b, _YargsInstance_exitProcess, _o, "f"); } }).value, - groups: ({ set value(_o) { __classPrivateFieldSet(_c, _YargsInstance_groups, _o, "f"); } }).value, - output: ({ set value(_o) { __classPrivateFieldSet(_d, _YargsInstance_output, _o, "f"); } }).value, - exitError: ({ set value(_o) { __classPrivateFieldSet(_e, _YargsInstance_exitError, _o, "f"); } }).value, - hasOutput: ({ set value(_o) { __classPrivateFieldSet(_f, _YargsInstance_hasOutput, _o, "f"); } }).value, - parsed: this.parsed, - strict: ({ set value(_o) { __classPrivateFieldSet(_g, _YargsInstance_strict, _o, "f"); } }).value, - strictCommands: ({ set value(_o) { __classPrivateFieldSet(_h, _YargsInstance_strictCommands, _o, "f"); } }).value, - strictOptions: ({ set value(_o) { __classPrivateFieldSet(_j, _YargsInstance_strictOptions, _o, "f"); } }).value, - completionCommand: ({ set value(_o) { __classPrivateFieldSet(_k, _YargsInstance_completionCommand, _o, "f"); } }).value, - parseFn: ({ set value(_o) { __classPrivateFieldSet(_l, _YargsInstance_parseFn, _o, "f"); } }).value, - parseContext: ({ set value(_o) { __classPrivateFieldSet(_m, _YargsInstance_parseContext, _o, "f"); } }).value, - } = frozen); - __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects = configObjects; - __classPrivateFieldGet(this, _YargsInstance_usage, "f").unfreeze(); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").unfreeze(); - __classPrivateFieldGet(this, _YargsInstance_command, "f").unfreeze(); - __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").unfreeze(); - } - [kValidateAsync](validation, argv) { - return maybeAsyncResult(argv, result => { - validation(result); - return result; - }); - } - getInternalMethods() { - return { - getCommandInstance: this[kGetCommandInstance].bind(this), - getContext: this[kGetContext].bind(this), - getHasOutput: this[kGetHasOutput].bind(this), - getLoggerInstance: this[kGetLoggerInstance].bind(this), - getParseContext: this[kGetParseContext].bind(this), - getParserConfiguration: this[kGetParserConfiguration].bind(this), - getUsageConfiguration: this[kGetUsageConfiguration].bind(this), - getUsageInstance: this[kGetUsageInstance].bind(this), - getValidationInstance: this[kGetValidationInstance].bind(this), - hasParseCallback: this[kHasParseCallback].bind(this), - 
isGlobalContext: this[kIsGlobalContext].bind(this), - postProcess: this[kPostProcess].bind(this), - reset: this[kReset].bind(this), - runValidation: this[kRunValidation].bind(this), - runYargsParserAndExecuteCommands: this[kRunYargsParserAndExecuteCommands].bind(this), - setHasOutput: this[kSetHasOutput].bind(this), - }; - } - [kGetCommandInstance]() { - return __classPrivateFieldGet(this, _YargsInstance_command, "f"); - } - [kGetContext]() { - return __classPrivateFieldGet(this, _YargsInstance_context, "f"); - } - [kGetHasOutput]() { - return __classPrivateFieldGet(this, _YargsInstance_hasOutput, "f"); - } - [kGetLoggerInstance]() { - return __classPrivateFieldGet(this, _YargsInstance_logger, "f"); - } - [kGetParseContext]() { - return __classPrivateFieldGet(this, _YargsInstance_parseContext, "f") || {}; - } - [kGetUsageInstance]() { - return __classPrivateFieldGet(this, _YargsInstance_usage, "f"); - } - [kGetValidationInstance]() { - return __classPrivateFieldGet(this, _YargsInstance_validation, "f"); - } - [kHasParseCallback]() { - return !!__classPrivateFieldGet(this, _YargsInstance_parseFn, "f"); - } - [kIsGlobalContext]() { - return __classPrivateFieldGet(this, _YargsInstance_isGlobalContext, "f"); - } - [kPostProcess](argv, populateDoubleDash, calledFromCommand, runGlobalMiddleware) { - if (calledFromCommand) - return argv; - if (isPromise(argv)) - return argv; - if (!populateDoubleDash) { - argv = this[kCopyDoubleDash](argv); - } - const parsePositionalNumbers = this[kGetParserConfiguration]()['parse-positional-numbers'] || - this[kGetParserConfiguration]()['parse-positional-numbers'] === undefined; - if (parsePositionalNumbers) { - argv = this[kParsePositionalNumbers](argv); - } - if (runGlobalMiddleware) { - argv = applyMiddleware(argv, this, __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").getMiddleware(), false); - } - return argv; - } - [kReset](aliases = {}) { - __classPrivateFieldSet(this, _YargsInstance_options, __classPrivateFieldGet(this, _YargsInstance_options, "f") || {}, "f"); - const tmpOptions = {}; - tmpOptions.local = __classPrivateFieldGet(this, _YargsInstance_options, "f").local || []; - tmpOptions.configObjects = __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects || []; - const localLookup = {}; - tmpOptions.local.forEach(l => { - localLookup[l] = true; - (aliases[l] || []).forEach(a => { - localLookup[a] = true; - }); - }); - Object.assign(__classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f"), Object.keys(__classPrivateFieldGet(this, _YargsInstance_groups, "f")).reduce((acc, groupName) => { - const keys = __classPrivateFieldGet(this, _YargsInstance_groups, "f")[groupName].filter(key => !(key in localLookup)); - if (keys.length > 0) { - acc[groupName] = keys; - } - return acc; - }, {})); - __classPrivateFieldSet(this, _YargsInstance_groups, {}, "f"); - const arrayOptions = [ - 'array', - 'boolean', - 'string', - 'skipValidation', - 'count', - 'normalize', - 'number', - 'hiddenOptions', - ]; - const objectOptions = [ - 'narg', - 'key', - 'alias', - 'default', - 'defaultDescription', - 'config', - 'choices', - 'demandedOptions', - 'demandedCommands', - 'deprecatedOptions', - ]; - arrayOptions.forEach(k => { - tmpOptions[k] = (__classPrivateFieldGet(this, _YargsInstance_options, "f")[k] || []).filter((k) => !localLookup[k]); - }); - objectOptions.forEach((k) => { - tmpOptions[k] = objFilter(__classPrivateFieldGet(this, _YargsInstance_options, "f")[k], k => !localLookup[k]); - }); - tmpOptions.envPrefix = 
__classPrivateFieldGet(this, _YargsInstance_options, "f").envPrefix; - __classPrivateFieldSet(this, _YargsInstance_options, tmpOptions, "f"); - __classPrivateFieldSet(this, _YargsInstance_usage, __classPrivateFieldGet(this, _YargsInstance_usage, "f") - ? __classPrivateFieldGet(this, _YargsInstance_usage, "f").reset(localLookup) - : Usage(this, __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); - __classPrivateFieldSet(this, _YargsInstance_validation, __classPrivateFieldGet(this, _YargsInstance_validation, "f") - ? __classPrivateFieldGet(this, _YargsInstance_validation, "f").reset(localLookup) - : Validation(this, __classPrivateFieldGet(this, _YargsInstance_usage, "f"), __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); - __classPrivateFieldSet(this, _YargsInstance_command, __classPrivateFieldGet(this, _YargsInstance_command, "f") - ? __classPrivateFieldGet(this, _YargsInstance_command, "f").reset() - : Command(__classPrivateFieldGet(this, _YargsInstance_usage, "f"), __classPrivateFieldGet(this, _YargsInstance_validation, "f"), __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f"), __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); - if (!__classPrivateFieldGet(this, _YargsInstance_completion, "f")) - __classPrivateFieldSet(this, _YargsInstance_completion, Completion(this, __classPrivateFieldGet(this, _YargsInstance_usage, "f"), __classPrivateFieldGet(this, _YargsInstance_command, "f"), __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); - __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").reset(); - __classPrivateFieldSet(this, _YargsInstance_completionCommand, null, "f"); - __classPrivateFieldSet(this, _YargsInstance_output, '', "f"); - __classPrivateFieldSet(this, _YargsInstance_exitError, null, "f"); - __classPrivateFieldSet(this, _YargsInstance_hasOutput, false, "f"); - this.parsed = false; - return this; - } - [kRebase](base, dir) { - return __classPrivateFieldGet(this, _YargsInstance_shim, "f").path.relative(base, dir); - } - [kRunYargsParserAndExecuteCommands](args, shortCircuit, calledFromCommand, commandIndex = 0, helpOnly = false) { - let skipValidation = !!calledFromCommand || helpOnly; - args = args || __classPrivateFieldGet(this, _YargsInstance_processArgs, "f"); - __classPrivateFieldGet(this, _YargsInstance_options, "f").__ = __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.__; - __classPrivateFieldGet(this, _YargsInstance_options, "f").configuration = this[kGetParserConfiguration](); - const populateDoubleDash = !!__classPrivateFieldGet(this, _YargsInstance_options, "f").configuration['populate--']; - const config = Object.assign({}, __classPrivateFieldGet(this, _YargsInstance_options, "f").configuration, { - 'populate--': true, - }); - const parsed = __classPrivateFieldGet(this, _YargsInstance_shim, "f").Parser.detailed(args, Object.assign({}, __classPrivateFieldGet(this, _YargsInstance_options, "f"), { - configuration: { 'parse-positional-numbers': false, ...config }, - })); - const argv = Object.assign(parsed.argv, __classPrivateFieldGet(this, _YargsInstance_parseContext, "f")); - let argvPromise = undefined; - const aliases = parsed.aliases; - let helpOptSet = false; - let versionOptSet = false; - Object.keys(argv).forEach(key => { - if (key === __classPrivateFieldGet(this, _YargsInstance_helpOpt, "f") && argv[key]) { - helpOptSet = true; - } - else if (key === __classPrivateFieldGet(this, _YargsInstance_versionOpt, "f") && argv[key]) { - versionOptSet = true; - } - }); - argv.$0 = 
this.$0; - this.parsed = parsed; - if (commandIndex === 0) { - __classPrivateFieldGet(this, _YargsInstance_usage, "f").clearCachedHelpMessage(); - } - try { - this[kGuessLocale](); - if (shortCircuit) { - return this[kPostProcess](argv, populateDoubleDash, !!calledFromCommand, false); - } - if (__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")) { - const helpCmds = [__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")] - .concat(aliases[__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")] || []) - .filter(k => k.length > 1); - if (helpCmds.includes('' + argv._[argv._.length - 1])) { - argv._.pop(); - helpOptSet = true; - } - } - __classPrivateFieldSet(this, _YargsInstance_isGlobalContext, false, "f"); - const handlerKeys = __classPrivateFieldGet(this, _YargsInstance_command, "f").getCommands(); - const requestCompletions = __classPrivateFieldGet(this, _YargsInstance_completion, "f").completionKey in argv; - const skipRecommendation = helpOptSet || requestCompletions || helpOnly; - if (argv._.length) { - if (handlerKeys.length) { - let firstUnknownCommand; - for (let i = commandIndex || 0, cmd; argv._[i] !== undefined; i++) { - cmd = String(argv._[i]); - if (handlerKeys.includes(cmd) && cmd !== __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f")) { - const innerArgv = __classPrivateFieldGet(this, _YargsInstance_command, "f").runCommand(cmd, this, parsed, i + 1, helpOnly, helpOptSet || versionOptSet || helpOnly); - return this[kPostProcess](innerArgv, populateDoubleDash, !!calledFromCommand, false); - } - else if (!firstUnknownCommand && - cmd !== __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f")) { - firstUnknownCommand = cmd; - break; - } - } - if (!__classPrivateFieldGet(this, _YargsInstance_command, "f").hasDefaultCommand() && - __classPrivateFieldGet(this, _YargsInstance_recommendCommands, "f") && - firstUnknownCommand && - !skipRecommendation) { - __classPrivateFieldGet(this, _YargsInstance_validation, "f").recommendCommands(firstUnknownCommand, handlerKeys); - } - } - if (__classPrivateFieldGet(this, _YargsInstance_completionCommand, "f") && - argv._.includes(__classPrivateFieldGet(this, _YargsInstance_completionCommand, "f")) && - !requestCompletions) { - if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) - setBlocking(true); - this.showCompletionScript(); - this.exit(0); - } - } - if (__classPrivateFieldGet(this, _YargsInstance_command, "f").hasDefaultCommand() && !skipRecommendation) { - const innerArgv = __classPrivateFieldGet(this, _YargsInstance_command, "f").runCommand(null, this, parsed, 0, helpOnly, helpOptSet || versionOptSet || helpOnly); - return this[kPostProcess](innerArgv, populateDoubleDash, !!calledFromCommand, false); - } - if (requestCompletions) { - if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) - setBlocking(true); - args = [].concat(args); - const completionArgs = args.slice(args.indexOf(`--${__classPrivateFieldGet(this, _YargsInstance_completion, "f").completionKey}`) + 1); - __classPrivateFieldGet(this, _YargsInstance_completion, "f").getCompletion(completionArgs, (err, completions) => { - if (err) - throw new YError(err.message); - (completions || []).forEach(completion => { - __classPrivateFieldGet(this, _YargsInstance_logger, "f").log(completion); - }); - this.exit(0); - }); - return this[kPostProcess](argv, !populateDoubleDash, !!calledFromCommand, false); - } - if (!__classPrivateFieldGet(this, _YargsInstance_hasOutput, "f")) { - if (helpOptSet) { - if 
(__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) - setBlocking(true); - skipValidation = true; - this.showHelp('log'); - this.exit(0); - } - else if (versionOptSet) { - if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) - setBlocking(true); - skipValidation = true; - __classPrivateFieldGet(this, _YargsInstance_usage, "f").showVersion('log'); - this.exit(0); - } - } - if (!skipValidation && __classPrivateFieldGet(this, _YargsInstance_options, "f").skipValidation.length > 0) { - skipValidation = Object.keys(argv).some(key => __classPrivateFieldGet(this, _YargsInstance_options, "f").skipValidation.indexOf(key) >= 0 && argv[key] === true); - } - if (!skipValidation) { - if (parsed.error) - throw new YError(parsed.error.message); - if (!requestCompletions) { - const validation = this[kRunValidation](aliases, {}, parsed.error); - if (!calledFromCommand) { - argvPromise = applyMiddleware(argv, this, __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").getMiddleware(), true); - } - argvPromise = this[kValidateAsync](validation, argvPromise !== null && argvPromise !== void 0 ? argvPromise : argv); - if (isPromise(argvPromise) && !calledFromCommand) { - argvPromise = argvPromise.then(() => { - return applyMiddleware(argv, this, __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").getMiddleware(), false); - }); - } - } - } - } - catch (err) { - if (err instanceof YError) - __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(err.message, err); - else - throw err; - } - return this[kPostProcess](argvPromise !== null && argvPromise !== void 0 ? argvPromise : argv, populateDoubleDash, !!calledFromCommand, true); - } - [kRunValidation](aliases, positionalMap, parseErrors, isDefaultCommand) { - const demandedOptions = { ...this.getDemandedOptions() }; - return (argv) => { - if (parseErrors) - throw new YError(parseErrors.message); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").nonOptionCount(argv); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").requiredArguments(argv, demandedOptions); - let failedStrictCommands = false; - if (__classPrivateFieldGet(this, _YargsInstance_strictCommands, "f")) { - failedStrictCommands = __classPrivateFieldGet(this, _YargsInstance_validation, "f").unknownCommands(argv); - } - if (__classPrivateFieldGet(this, _YargsInstance_strict, "f") && !failedStrictCommands) { - __classPrivateFieldGet(this, _YargsInstance_validation, "f").unknownArguments(argv, aliases, positionalMap, !!isDefaultCommand); - } - else if (__classPrivateFieldGet(this, _YargsInstance_strictOptions, "f")) { - __classPrivateFieldGet(this, _YargsInstance_validation, "f").unknownArguments(argv, aliases, {}, false, false); - } - __classPrivateFieldGet(this, _YargsInstance_validation, "f").limitedChoices(argv); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").implications(argv); - __classPrivateFieldGet(this, _YargsInstance_validation, "f").conflicting(argv); - }; - } - [kSetHasOutput]() { - __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); - } - [kTrackManuallySetKeys](keys) { - if (typeof keys === 'string') { - __classPrivateFieldGet(this, _YargsInstance_options, "f").key[keys] = true; - } - else { - for (const k of keys) { - __classPrivateFieldGet(this, _YargsInstance_options, "f").key[k] = true; - } - } - } -} -export function isYargsInstance(y) { - return !!y && typeof y.getInternalMethods === 'function'; -} diff --git a/node_modules/yargs/build/lib/yerror.js 
b/node_modules/yargs/build/lib/yerror.js deleted file mode 100644 index 7a36684..0000000 --- a/node_modules/yargs/build/lib/yerror.js +++ /dev/null @@ -1,9 +0,0 @@ -export class YError extends Error { - constructor(msg) { - super(msg || 'yargs error'); - this.name = 'YError'; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, YError); - } - } -} diff --git a/node_modules/yargs/helpers/helpers.mjs b/node_modules/yargs/helpers/helpers.mjs deleted file mode 100644 index 3f96b3d..0000000 --- a/node_modules/yargs/helpers/helpers.mjs +++ /dev/null @@ -1,10 +0,0 @@ -import {applyExtends as _applyExtends} from '../build/lib/utils/apply-extends.js'; -import {hideBin} from '../build/lib/utils/process-argv.js'; -import Parser from 'yargs-parser'; -import shim from '../lib/platform-shims/esm.mjs'; - -const applyExtends = (config, cwd, mergeExtends) => { - return _applyExtends(config, cwd, mergeExtends, shim); -}; - -export {applyExtends, hideBin, Parser}; diff --git a/node_modules/yargs/helpers/index.js b/node_modules/yargs/helpers/index.js deleted file mode 100644 index 8ab79a3..0000000 --- a/node_modules/yargs/helpers/index.js +++ /dev/null @@ -1,14 +0,0 @@ -const { - applyExtends, - cjsPlatformShim, - Parser, - processArgv, -} = require('../build/index.cjs'); - -module.exports = { - applyExtends: (config, cwd, mergeExtends) => { - return applyExtends(config, cwd, mergeExtends, cjsPlatformShim); - }, - hideBin: processArgv.hideBin, - Parser, -}; diff --git a/node_modules/yargs/helpers/package.json b/node_modules/yargs/helpers/package.json deleted file mode 100644 index 5bbefff..0000000 --- a/node_modules/yargs/helpers/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/yargs/index.cjs b/node_modules/yargs/index.cjs deleted file mode 100644 index d1eee82..0000000 --- a/node_modules/yargs/index.cjs +++ /dev/null @@ -1,53 +0,0 @@ -'use strict'; -// classic singleton yargs API, to use yargs -// without running as a singleton do: -// require('yargs/yargs')(process.argv.slice(2)) -const {Yargs, processArgv} = require('./build/index.cjs'); - -Argv(processArgv.hideBin(process.argv)); - -module.exports = Argv; - -function Argv(processArgs, cwd) { - const argv = Yargs(processArgs, cwd, require); - singletonify(argv); - // TODO(bcoe): warn if argv.parse() or argv.argv is used directly. - return argv; -} - -function defineGetter(obj, key, getter) { - Object.defineProperty(obj, key, { - configurable: true, - enumerable: true, - get: getter, - }); -} -function lookupGetter(obj, key) { - const desc = Object.getOwnPropertyDescriptor(obj, key); - if (typeof desc !== 'undefined') { - return desc.get; - } -} - -/* Hack an instance of Argv with process.argv into Argv - so people can do - require('yargs')(['--beeble=1','-z','zizzle']).argv - to parse a list of args and - require('yargs').argv - to get a parsed version of process.argv. 
-*/ -function singletonify(inst) { - [ - ...Object.keys(inst), - ...Object.getOwnPropertyNames(inst.constructor.prototype), - ].forEach(key => { - if (key === 'argv') { - defineGetter(Argv, key, lookupGetter(inst, key)); - } else if (typeof inst[key] === 'function') { - Argv[key] = inst[key].bind(inst); - } else { - defineGetter(Argv, '$0', () => inst.$0); - defineGetter(Argv, 'parsed', () => inst.parsed); - } - }); -} diff --git a/node_modules/yargs/index.mjs b/node_modules/yargs/index.mjs deleted file mode 100644 index c6440b9..0000000 --- a/node_modules/yargs/index.mjs +++ /dev/null @@ -1,8 +0,0 @@ -'use strict'; - -// Bootstraps yargs for ESM: -import esmPlatformShim from './lib/platform-shims/esm.mjs'; -import {YargsFactory} from './build/lib/yargs-factory.js'; - -const Yargs = YargsFactory(esmPlatformShim); -export default Yargs; diff --git a/node_modules/yargs/lib/platform-shims/browser.mjs b/node_modules/yargs/lib/platform-shims/browser.mjs deleted file mode 100644 index 5f8ec61..0000000 --- a/node_modules/yargs/lib/platform-shims/browser.mjs +++ /dev/null @@ -1,95 +0,0 @@ -/* eslint-disable no-unused-vars */ -'use strict'; - -import cliui from 'https://unpkg.com/cliui@7.0.1/index.mjs'; // eslint-disable-line -import Parser from 'https://unpkg.com/yargs-parser@19.0.0/browser.js'; // eslint-disable-line -import {getProcessArgvBin} from '../../build/lib/utils/process-argv.js'; -import {YError} from '../../build/lib/yerror.js'; - -const REQUIRE_ERROR = 'require is not supported in browser'; -const REQUIRE_DIRECTORY_ERROR = - 'loading a directory of commands is not supported in browser'; - -export default { - assert: { - notStrictEqual: (a, b) => { - // noop. - }, - strictEqual: (a, b) => { - // noop. - }, - }, - cliui, - findUp: () => undefined, - getEnv: key => { - // There is no environment in browser: - return undefined; - }, - inspect: console.log, - getCallerFile: () => { - throw new YError(REQUIRE_DIRECTORY_ERROR); - }, - getProcessArgvBin, - mainFilename: 'yargs', - Parser, - path: { - basename: str => str, - dirname: str => str, - extname: str => str, - relative: str => str, - }, - process: { - argv: () => [], - cwd: () => '', - emitWarning: (warning, name) => {}, - execPath: () => '', - // exit is noop browser: - exit: () => {}, - nextTick: cb => { - // eslint-disable-next-line no-undef - window.setTimeout(cb, 1); - }, - stdColumns: 80, - }, - readFileSync: () => { - return ''; - }, - require: () => { - throw new YError(REQUIRE_ERROR); - }, - requireDirectory: () => { - throw new YError(REQUIRE_DIRECTORY_ERROR); - }, - stringWidth: str => { - return [...str].length; - }, - // TODO: replace this with y18n once it's ported to ESM: - y18n: { - __: (...str) => { - if (str.length === 0) return ''; - const args = str.slice(1); - return sprintf(str[0], ...args); - }, - __n: (str1, str2, count, ...args) => { - if (count === 1) { - return sprintf(str1, ...args); - } else { - return sprintf(str2, ...args); - } - }, - getLocale: () => { - return 'en_US'; - }, - setLocale: () => {}, - updateLocale: () => {}, - }, -}; - -function sprintf(_str, ...args) { - let str = ''; - const split = _str.split('%s'); - split.forEach((token, i) => { - str += `${token}${split[i + 1] !== undefined && args[i] ? 
args[i] : ''}`; - }); - return str; -} diff --git a/node_modules/yargs/lib/platform-shims/esm.mjs b/node_modules/yargs/lib/platform-shims/esm.mjs deleted file mode 100644 index c25baa5..0000000 --- a/node_modules/yargs/lib/platform-shims/esm.mjs +++ /dev/null @@ -1,73 +0,0 @@ -'use strict' - -import { notStrictEqual, strictEqual } from 'assert' -import cliui from 'cliui' -import escalade from 'escalade/sync' -import { inspect } from 'util' -import { readFileSync } from 'fs' -import { fileURLToPath } from 'url'; -import Parser from 'yargs-parser' -import { basename, dirname, extname, relative, resolve } from 'path' -import { getProcessArgvBin } from '../../build/lib/utils/process-argv.js' -import { YError } from '../../build/lib/yerror.js' -import y18n from 'y18n' - -const REQUIRE_ERROR = 'require is not supported by ESM' -const REQUIRE_DIRECTORY_ERROR = 'loading a directory of commands is not supported yet for ESM' - -let __dirname; -try { - __dirname = fileURLToPath(import.meta.url); -} catch (e) { - __dirname = process.cwd(); -} -const mainFilename = __dirname.substring(0, __dirname.lastIndexOf('node_modules')); - -export default { - assert: { - notStrictEqual, - strictEqual - }, - cliui, - findUp: escalade, - getEnv: (key) => { - return process.env[key] - }, - inspect, - getCallerFile: () => { - throw new YError(REQUIRE_DIRECTORY_ERROR) - }, - getProcessArgvBin, - mainFilename: mainFilename || process.cwd(), - Parser, - path: { - basename, - dirname, - extname, - relative, - resolve - }, - process: { - argv: () => process.argv, - cwd: process.cwd, - emitWarning: (warning, type) => process.emitWarning(warning, type), - execPath: () => process.execPath, - exit: process.exit, - nextTick: process.nextTick, - stdColumns: typeof process.stdout.columns !== 'undefined' ? 
process.stdout.columns : null - }, - readFileSync, - require: () => { - throw new YError(REQUIRE_ERROR) - }, - requireDirectory: () => { - throw new YError(REQUIRE_DIRECTORY_ERROR) - }, - stringWidth: (str) => { - return [...str].length - }, - y18n: y18n({ - directory: resolve(__dirname, '../../../locales'), - updateFiles: false - }) -} diff --git a/node_modules/yargs/locales/be.json b/node_modules/yargs/locales/be.json deleted file mode 100644 index e28fa30..0000000 --- a/node_modules/yargs/locales/be.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "Commands:": "Каманды:", - "Options:": "Опцыі:", - "Examples:": "Прыклады:", - "boolean": "булевы тып", - "count": "падлік", - "string": "радковы тып", - "number": "лік", - "array": "масіў", - "required": "неабходна", - "default": "па змаўчанні", - "default:": "па змаўчанні:", - "choices:": "магчымасці:", - "aliases:": "аліасы:", - "generated-value": "згенераванае значэнне", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Недастаткова неапцыйных аргументаў: ёсць %s, трэба як мінімум %s", - "other": "Недастаткова неапцыйных аргументаў: ёсць %s, трэба як мінімум %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Занадта шмат неапцыйных аргументаў: ёсць %s, максімум дапушчальна %s", - "other": "Занадта шмат неапцыйных аргументаў: ёсць %s, максімум дапушчальна %s" - }, - "Missing argument value: %s": { - "one": "Не хапае значэння аргументу: %s", - "other": "Не хапае значэнняў аргументаў: %s" - }, - "Missing required argument: %s": { - "one": "Не хапае неабходнага аргументу: %s", - "other": "Не хапае неабходных аргументаў: %s" - }, - "Unknown argument: %s": { - "one": "Невядомы аргумент: %s", - "other": "Невядомыя аргументы: %s" - }, - "Invalid values:": "Несапраўдныя значэння:", - "Argument: %s, Given: %s, Choices: %s": "Аргумент: %s, Дадзенае значэнне: %s, Магчымасці: %s", - "Argument check failed: %s": "Праверка аргументаў не ўдалася: %s", - "Implications failed:": "Дадзены аргумент патрабуе наступны дадатковы аргумент:", - "Not enough arguments following: %s": "Недастаткова наступных аргументаў: %s", - "Invalid JSON config file: %s": "Несапраўдны файл канфігурацыі JSON: %s", - "Path to JSON config file": "Шлях да файла канфігурацыі JSON", - "Show help": "Паказаць дапамогу", - "Show version number": "Паказаць нумар версіі", - "Did you mean %s?": "Вы мелі на ўвазе %s?" 
-} diff --git a/node_modules/yargs/locales/cs.json b/node_modules/yargs/locales/cs.json deleted file mode 100644 index 6394875..0000000 --- a/node_modules/yargs/locales/cs.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Commands:": "Příkazy:", - "Options:": "Možnosti:", - "Examples:": "Příklady:", - "boolean": "logická hodnota", - "count": "počet", - "string": "řetězec", - "number": "číslo", - "array": "pole", - "required": "povinné", - "default": "výchozí", - "default:": "výchozí:", - "choices:": "volby:", - "aliases:": "aliasy:", - "generated-value": "generovaná-hodnota", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Nedostatek argumentů: zadáno %s, je potřeba alespoň %s", - "other": "Nedostatek argumentů: zadáno %s, je potřeba alespoň %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Příliš mnoho argumentů: zadáno %s, maximálně %s", - "other": "Příliš mnoho argumentů: zadáno %s, maximálně %s" - }, - "Missing argument value: %s": { - "one": "Chybí hodnota argumentu: %s", - "other": "Chybí hodnoty argumentů: %s" - }, - "Missing required argument: %s": { - "one": "Chybí požadovaný argument: %s", - "other": "Chybí požadované argumenty: %s" - }, - "Unknown argument: %s": { - "one": "Neznámý argument: %s", - "other": "Neznámé argumenty: %s" - }, - "Invalid values:": "Neplatné hodnoty:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Zadáno: %s, Možnosti: %s", - "Argument check failed: %s": "Kontrola argumentů se nezdařila: %s", - "Implications failed:": "Chybí závislé argumenty:", - "Not enough arguments following: %s": "Následuje nedostatek argumentů: %s", - "Invalid JSON config file: %s": "Neplatný konfigurační soubor JSON: %s", - "Path to JSON config file": "Cesta ke konfiguračnímu souboru JSON", - "Show help": "Zobrazit nápovědu", - "Show version number": "Zobrazit číslo verze", - "Did you mean %s?": "Měl jste na mysli %s?", - "Arguments %s and %s are mutually exclusive" : "Argumenty %s a %s se vzájemně vylučují", - "Positionals:": "Poziční:", - "command": "příkaz", - "deprecated": "zastaralé", - "deprecated: %s": "zastaralé: %s" -} diff --git a/node_modules/yargs/locales/de.json b/node_modules/yargs/locales/de.json deleted file mode 100644 index dc73ec3..0000000 --- a/node_modules/yargs/locales/de.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "Commands:": "Kommandos:", - "Options:": "Optionen:", - "Examples:": "Beispiele:", - "boolean": "boolean", - "count": "Zähler", - "string": "string", - "number": "Zahl", - "array": "array", - "required": "erforderlich", - "default": "Standard", - "default:": "Standard:", - "choices:": "Möglichkeiten:", - "aliases:": "Aliase:", - "generated-value": "Generierter-Wert", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Nicht genügend Argumente ohne Optionen: %s vorhanden, mindestens %s benötigt", - "other": "Nicht genügend Argumente ohne Optionen: %s vorhanden, mindestens %s benötigt" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Zu viele Argumente ohne Optionen: %s vorhanden, maximal %s erlaubt", - "other": "Zu viele Argumente ohne Optionen: %s vorhanden, maximal %s erlaubt" - }, - "Missing argument value: %s": { - "one": "Fehlender Argumentwert: %s", - "other": "Fehlende Argumentwerte: %s" - }, - "Missing required argument: %s": { - "one": "Fehlendes Argument: %s", - "other": "Fehlende Argumente: %s" - }, - "Unknown argument: %s": { - "one": "Unbekanntes Argument: %s", - "other": "Unbekannte Argumente: %s" - }, - "Invalid values:": 
"Unzulässige Werte:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gegeben: %s, Möglichkeiten: %s", - "Argument check failed: %s": "Argumente-Check fehlgeschlagen: %s", - "Implications failed:": "Fehlende abhängige Argumente:", - "Not enough arguments following: %s": "Nicht genügend Argumente nach: %s", - "Invalid JSON config file: %s": "Fehlerhafte JSON-Config Datei: %s", - "Path to JSON config file": "Pfad zur JSON-Config Datei", - "Show help": "Hilfe anzeigen", - "Show version number": "Version anzeigen", - "Did you mean %s?": "Meintest du %s?" -} diff --git a/node_modules/yargs/locales/en.json b/node_modules/yargs/locales/en.json deleted file mode 100644 index af096a1..0000000 --- a/node_modules/yargs/locales/en.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "Commands:": "Commands:", - "Options:": "Options:", - "Examples:": "Examples:", - "boolean": "boolean", - "count": "count", - "string": "string", - "number": "number", - "array": "array", - "required": "required", - "default": "default", - "default:": "default:", - "choices:": "choices:", - "aliases:": "aliases:", - "generated-value": "generated-value", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Not enough non-option arguments: got %s, need at least %s", - "other": "Not enough non-option arguments: got %s, need at least %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Too many non-option arguments: got %s, maximum of %s", - "other": "Too many non-option arguments: got %s, maximum of %s" - }, - "Missing argument value: %s": { - "one": "Missing argument value: %s", - "other": "Missing argument values: %s" - }, - "Missing required argument: %s": { - "one": "Missing required argument: %s", - "other": "Missing required arguments: %s" - }, - "Unknown argument: %s": { - "one": "Unknown argument: %s", - "other": "Unknown arguments: %s" - }, - "Unknown command: %s": { - "one": "Unknown command: %s", - "other": "Unknown commands: %s" - }, - "Invalid values:": "Invalid values:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Given: %s, Choices: %s", - "Argument check failed: %s": "Argument check failed: %s", - "Implications failed:": "Missing dependent arguments:", - "Not enough arguments following: %s": "Not enough arguments following: %s", - "Invalid JSON config file: %s": "Invalid JSON config file: %s", - "Path to JSON config file": "Path to JSON config file", - "Show help": "Show help", - "Show version number": "Show version number", - "Did you mean %s?": "Did you mean %s?", - "Arguments %s and %s are mutually exclusive" : "Arguments %s and %s are mutually exclusive", - "Positionals:": "Positionals:", - "command": "command", - "deprecated": "deprecated", - "deprecated: %s": "deprecated: %s" -} diff --git a/node_modules/yargs/locales/es.json b/node_modules/yargs/locales/es.json deleted file mode 100644 index d77b461..0000000 --- a/node_modules/yargs/locales/es.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "Commands:": "Comandos:", - "Options:": "Opciones:", - "Examples:": "Ejemplos:", - "boolean": "booleano", - "count": "cuenta", - "string": "cadena de caracteres", - "number": "número", - "array": "tabla", - "required": "requerido", - "default": "defecto", - "default:": "defecto:", - "choices:": "selección:", - "aliases:": "alias:", - "generated-value": "valor-generado", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Hacen falta argumentos no-opcionales: Número recibido %s, necesita por lo menos %s", - "other": "Hacen falta argumentos 
no-opcionales: Número recibido %s, necesita por lo menos %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Demasiados argumentos no-opcionales: Número recibido %s, máximo es %s", - "other": "Demasiados argumentos no-opcionales: Número recibido %s, máximo es %s" - }, - "Missing argument value: %s": { - "one": "Falta argumento: %s", - "other": "Faltan argumentos: %s" - }, - "Missing required argument: %s": { - "one": "Falta argumento requerido: %s", - "other": "Faltan argumentos requeridos: %s" - }, - "Unknown argument: %s": { - "one": "Argumento desconocido: %s", - "other": "Argumentos desconocidos: %s" - }, - "Invalid values:": "Valores inválidos:", - "Argument: %s, Given: %s, Choices: %s": "Argumento: %s, Recibido: %s, Seleccionados: %s", - "Argument check failed: %s": "Verificación de argumento ha fallado: %s", - "Implications failed:": "Implicaciones fallidas:", - "Not enough arguments following: %s": "No hay suficientes argumentos después de: %s", - "Invalid JSON config file: %s": "Archivo de configuración JSON inválido: %s", - "Path to JSON config file": "Ruta al archivo de configuración JSON", - "Show help": "Muestra ayuda", - "Show version number": "Muestra número de versión", - "Did you mean %s?": "Quisiste decir %s?" -} diff --git a/node_modules/yargs/locales/fi.json b/node_modules/yargs/locales/fi.json deleted file mode 100644 index 481feb7..0000000 --- a/node_modules/yargs/locales/fi.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "Commands:": "Komennot:", - "Options:": "Valinnat:", - "Examples:": "Esimerkkejä:", - "boolean": "totuusarvo", - "count": "lukumäärä", - "string": "merkkijono", - "number": "numero", - "array": "taulukko", - "required": "pakollinen", - "default": "oletusarvo", - "default:": "oletusarvo:", - "choices:": "vaihtoehdot:", - "aliases:": "aliakset:", - "generated-value": "generoitu-arvo", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Liian vähän argumentteja, jotka eivät ole valintoja: annettu %s, vaaditaan vähintään %s", - "other": "Liian vähän argumentteja, jotka eivät ole valintoja: annettu %s, vaaditaan vähintään %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Liikaa argumentteja, jotka eivät ole valintoja: annettu %s, sallitaan enintään %s", - "other": "Liikaa argumentteja, jotka eivät ole valintoja: annettu %s, sallitaan enintään %s" - }, - "Missing argument value: %s": { - "one": "Argumentin arvo puuttuu: %s", - "other": "Argumentin arvot puuttuvat: %s" - }, - "Missing required argument: %s": { - "one": "Pakollinen argumentti puuttuu: %s", - "other": "Pakollisia argumentteja puuttuu: %s" - }, - "Unknown argument: %s": { - "one": "Tuntematon argumentti: %s", - "other": "Tuntemattomia argumentteja: %s" - }, - "Invalid values:": "Virheelliset arvot:", - "Argument: %s, Given: %s, Choices: %s": "Argumentti: %s, Annettu: %s, Vaihtoehdot: %s", - "Argument check failed: %s": "Argumentin tarkistus epäonnistui: %s", - "Implications failed:": "Riippuvia argumentteja puuttuu:", - "Not enough arguments following: %s": "Argumentin perässä ei ole tarpeeksi argumentteja: %s", - "Invalid JSON config file: %s": "Epävalidi JSON-asetustiedosto: %s", - "Path to JSON config file": "JSON-asetustiedoston polku", - "Show help": "Näytä ohje", - "Show version number": "Näytä versionumero", - "Did you mean %s?": "Tarkoititko %s?", - "Arguments %s and %s are mutually exclusive" : "Argumentit %s ja %s eivät ole yhteensopivat", - "Positionals:": "Sijaintiparametrit:", - "command": "komento" -} diff 
--git a/node_modules/yargs/locales/fr.json b/node_modules/yargs/locales/fr.json deleted file mode 100644 index edd743f..0000000 --- a/node_modules/yargs/locales/fr.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "Commands:": "Commandes :", - "Options:": "Options :", - "Examples:": "Exemples :", - "boolean": "booléen", - "count": "compteur", - "string": "chaîne de caractères", - "number": "nombre", - "array": "tableau", - "required": "requis", - "default": "défaut", - "default:": "défaut :", - "choices:": "choix :", - "aliases:": "alias :", - "generated-value": "valeur générée", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Pas assez d'arguments (hors options) : reçu %s, besoin d'au moins %s", - "other": "Pas assez d'arguments (hors options) : reçus %s, besoin d'au moins %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Trop d'arguments (hors options) : reçu %s, maximum de %s", - "other": "Trop d'arguments (hors options) : reçus %s, maximum de %s" - }, - "Missing argument value: %s": { - "one": "Argument manquant : %s", - "other": "Arguments manquants : %s" - }, - "Missing required argument: %s": { - "one": "Argument requis manquant : %s", - "other": "Arguments requis manquants : %s" - }, - "Unknown argument: %s": { - "one": "Argument inconnu : %s", - "other": "Arguments inconnus : %s" - }, - "Unknown command: %s": { - "one": "Commande inconnue : %s", - "other": "Commandes inconnues : %s" - }, - "Invalid values:": "Valeurs invalides :", - "Argument: %s, Given: %s, Choices: %s": "Argument : %s, donné : %s, choix : %s", - "Argument check failed: %s": "Echec de la vérification de l'argument : %s", - "Implications failed:": "Arguments dépendants manquants :", - "Not enough arguments following: %s": "Pas assez d'arguments après : %s", - "Invalid JSON config file: %s": "Fichier de configuration JSON invalide : %s", - "Path to JSON config file": "Chemin du fichier de configuration JSON", - "Show help": "Affiche l'aide", - "Show version number": "Affiche le numéro de version", - "Did you mean %s?": "Vouliez-vous dire %s ?", - "Arguments %s and %s are mutually exclusive" : "Les arguments %s et %s sont mutuellement exclusifs", - "Positionals:": "Arguments positionnels :", - "command": "commande" -} diff --git a/node_modules/yargs/locales/hi.json b/node_modules/yargs/locales/hi.json deleted file mode 100644 index a9de77c..0000000 --- a/node_modules/yargs/locales/hi.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "Commands:": "आदेश:", - "Options:": "विकल्प:", - "Examples:": "उदाहरण:", - "boolean": "सत्यता", - "count": "संख्या", - "string": "वर्णों का तार ", - "number": "अंक", - "array": "सरणी", - "required": "आवश्यक", - "default": "डिफॉल्ट", - "default:": "डिफॉल्ट:", - "choices:": "विकल्प:", - "aliases:": "उपनाम:", - "generated-value": "उत्पन्न-मूल्य", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "पर्याप्त गैर-विकल्प तर्क प्राप्त नहीं: %s प्राप्त, कम से कम %s की आवश्यकता है", - "other": "पर्याप्त गैर-विकल्प तर्क प्राप्त नहीं: %s प्राप्त, कम से कम %s की आवश्यकता है" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "बहुत सारे गैर-विकल्प तर्क: %s प्राप्त, अधिकतम %s मान्य", - "other": "बहुत सारे गैर-विकल्प तर्क: %s प्राप्त, अधिकतम %s मान्य" - }, - "Missing argument value: %s": { - "one": "कुछ तर्को के मूल्य गुम हैं: %s", - "other": "कुछ तर्को के मूल्य गुम हैं: %s" - }, - "Missing required argument: %s": { - "one": "आवश्यक तर्क गुम हैं: %s", - "other": "आवश्यक तर्क गुम हैं: %s" - }, - "Unknown argument: %s": { 
- "one": "अज्ञात तर्क प्राप्त: %s", - "other": "अज्ञात तर्क प्राप्त: %s" - }, - "Invalid values:": "अमान्य मूल्य:", - "Argument: %s, Given: %s, Choices: %s": "तर्क: %s, प्राप्त: %s, विकल्प: %s", - "Argument check failed: %s": "तर्क जांच विफल: %s", - "Implications failed:": "दिए गए तर्क के लिए अतिरिक्त तर्क की अपेक्षा है:", - "Not enough arguments following: %s": "निम्नलिखित के बाद पर्याप्त तर्क नहीं प्राप्त: %s", - "Invalid JSON config file: %s": "अमान्य JSON config फाइल: %s", - "Path to JSON config file": "JSON config फाइल का पथ", - "Show help": "सहायता दिखाएँ", - "Show version number": "Version संख्या दिखाएँ", - "Did you mean %s?": "क्या आपका मतलब है %s?", - "Arguments %s and %s are mutually exclusive" : "तर्क %s और %s परस्पर अनन्य हैं", - "Positionals:": "स्थानीय:", - "command": "आदेश" -} diff --git a/node_modules/yargs/locales/hu.json b/node_modules/yargs/locales/hu.json deleted file mode 100644 index 21492d0..0000000 --- a/node_modules/yargs/locales/hu.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "Commands:": "Parancsok:", - "Options:": "Opciók:", - "Examples:": "Példák:", - "boolean": "boolean", - "count": "számláló", - "string": "szöveg", - "number": "szám", - "array": "tömb", - "required": "kötelező", - "default": "alapértelmezett", - "default:": "alapértelmezett:", - "choices:": "lehetőségek:", - "aliases:": "aliaszok:", - "generated-value": "generált-érték", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Nincs elég nem opcionális argumentum: %s van, legalább %s kell", - "other": "Nincs elég nem opcionális argumentum: %s van, legalább %s kell" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Túl sok nem opciánlis argumentum van: %s van, maximum %s lehet", - "other": "Túl sok nem opciánlis argumentum van: %s van, maximum %s lehet" - }, - "Missing argument value: %s": { - "one": "Hiányzó argumentum érték: %s", - "other": "Hiányzó argumentum értékek: %s" - }, - "Missing required argument: %s": { - "one": "Hiányzó kötelező argumentum: %s", - "other": "Hiányzó kötelező argumentumok: %s" - }, - "Unknown argument: %s": { - "one": "Ismeretlen argumentum: %s", - "other": "Ismeretlen argumentumok: %s" - }, - "Invalid values:": "Érvénytelen érték:", - "Argument: %s, Given: %s, Choices: %s": "Argumentum: %s, Megadott: %s, Lehetőségek: %s", - "Argument check failed: %s": "Argumentum ellenőrzés sikertelen: %s", - "Implications failed:": "Implikációk sikertelenek:", - "Not enough arguments following: %s": "Nem elég argumentum követi: %s", - "Invalid JSON config file: %s": "Érvénytelen JSON konfigurációs file: %s", - "Path to JSON config file": "JSON konfigurációs file helye", - "Show help": "Súgo megjelenítése", - "Show version number": "Verziószám megjelenítése", - "Did you mean %s?": "Erre gondoltál %s?" 
-} diff --git a/node_modules/yargs/locales/id.json b/node_modules/yargs/locales/id.json deleted file mode 100644 index 125867c..0000000 --- a/node_modules/yargs/locales/id.json +++ /dev/null @@ -1,50 +0,0 @@ - -{ - "Commands:": "Perintah:", - "Options:": "Pilihan:", - "Examples:": "Contoh:", - "boolean": "boolean", - "count": "jumlah", - "number": "nomor", - "string": "string", - "array": "larik", - "required": "diperlukan", - "default": "bawaan", - "default:": "bawaan:", - "aliases:": "istilah lain:", - "choices:": "pilihan:", - "generated-value": "nilai-yang-dihasilkan", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Argumen wajib kurang: hanya %s, minimal %s", - "other": "Argumen wajib kurang: hanya %s, minimal %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Terlalu banyak argumen wajib: ada %s, maksimal %s", - "other": "Terlalu banyak argumen wajib: ada %s, maksimal %s" - }, - "Missing argument value: %s": { - "one": "Kurang argumen: %s", - "other": "Kurang argumen: %s" - }, - "Missing required argument: %s": { - "one": "Kurang argumen wajib: %s", - "other": "Kurang argumen wajib: %s" - }, - "Unknown argument: %s": { - "one": "Argumen tak diketahui: %s", - "other": "Argumen tak diketahui: %s" - }, - "Invalid values:": "Nilai-nilai tidak valid:", - "Argument: %s, Given: %s, Choices: %s": "Argumen: %s, Diberikan: %s, Pilihan: %s", - "Argument check failed: %s": "Pemeriksaan argument gagal: %s", - "Implications failed:": "Implikasi gagal:", - "Not enough arguments following: %s": "Kurang argumen untuk: %s", - "Invalid JSON config file: %s": "Berkas konfigurasi JSON tidak valid: %s", - "Path to JSON config file": "Alamat berkas konfigurasi JSON", - "Show help": "Lihat bantuan", - "Show version number": "Lihat nomor versi", - "Did you mean %s?": "Maksud Anda: %s?", - "Arguments %s and %s are mutually exclusive" : "Argumen %s dan %s saling eksklusif", - "Positionals:": "Posisional-posisional:", - "command": "perintah" -} diff --git a/node_modules/yargs/locales/it.json b/node_modules/yargs/locales/it.json deleted file mode 100644 index fde5756..0000000 --- a/node_modules/yargs/locales/it.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "Commands:": "Comandi:", - "Options:": "Opzioni:", - "Examples:": "Esempi:", - "boolean": "booleano", - "count": "contatore", - "string": "stringa", - "number": "numero", - "array": "vettore", - "required": "richiesto", - "default": "predefinito", - "default:": "predefinito:", - "choices:": "scelte:", - "aliases:": "alias:", - "generated-value": "valore generato", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Numero insufficiente di argomenti non opzione: inseriti %s, richiesti almeno %s", - "other": "Numero insufficiente di argomenti non opzione: inseriti %s, richiesti almeno %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Troppi argomenti non opzione: inseriti %s, massimo possibile %s", - "other": "Troppi argomenti non opzione: inseriti %s, massimo possibile %s" - }, - "Missing argument value: %s": { - "one": "Argomento mancante: %s", - "other": "Argomenti mancanti: %s" - }, - "Missing required argument: %s": { - "one": "Argomento richiesto mancante: %s", - "other": "Argomenti richiesti mancanti: %s" - }, - "Unknown argument: %s": { - "one": "Argomento sconosciuto: %s", - "other": "Argomenti sconosciuti: %s" - }, - "Invalid values:": "Valori non validi:", - "Argument: %s, Given: %s, Choices: %s": "Argomento: %s, Richiesto: %s, Scelte: %s", - 
"Argument check failed: %s": "Controllo dell'argomento fallito: %s", - "Implications failed:": "Argomenti dipendenti mancanti:", - "Not enough arguments following: %s": "Argomenti insufficienti dopo: %s", - "Invalid JSON config file: %s": "File di configurazione JSON non valido: %s", - "Path to JSON config file": "Percorso del file di configurazione JSON", - "Show help": "Mostra la schermata di aiuto", - "Show version number": "Mostra il numero di versione", - "Did you mean %s?": "Intendi forse %s?" -} diff --git a/node_modules/yargs/locales/ja.json b/node_modules/yargs/locales/ja.json deleted file mode 100644 index 3954ae6..0000000 --- a/node_modules/yargs/locales/ja.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Commands:": "コマンド:", - "Options:": "オプション:", - "Examples:": "例:", - "boolean": "真偽", - "count": "カウント", - "string": "文字列", - "number": "数値", - "array": "配列", - "required": "必須", - "default": "デフォルト", - "default:": "デフォルト:", - "choices:": "選択してください:", - "aliases:": "エイリアス:", - "generated-value": "生成された値", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "オプションではない引数が %s 個では不足しています。少なくとも %s 個の引数が必要です:", - "other": "オプションではない引数が %s 個では不足しています。少なくとも %s 個の引数が必要です:" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "オプションではない引数が %s 個では多すぎます。最大で %s 個までです:", - "other": "オプションではない引数が %s 個では多すぎます。最大で %s 個までです:" - }, - "Missing argument value: %s": { - "one": "引数の値が見つかりません: %s", - "other": "引数の値が見つかりません: %s" - }, - "Missing required argument: %s": { - "one": "必須の引数が見つかりません: %s", - "other": "必須の引数が見つかりません: %s" - }, - "Unknown argument: %s": { - "one": "未知の引数です: %s", - "other": "未知の引数です: %s" - }, - "Invalid values:": "不正な値です:", - "Argument: %s, Given: %s, Choices: %s": "引数は %s です。与えられた値: %s, 選択してください: %s", - "Argument check failed: %s": "引数のチェックに失敗しました: %s", - "Implications failed:": "オプションの組み合わせで不正が生じました:", - "Not enough arguments following: %s": "次の引数が不足しています。: %s", - "Invalid JSON config file: %s": "JSONの設定ファイルが不正です: %s", - "Path to JSON config file": "JSONの設定ファイルまでのpath", - "Show help": "ヘルプを表示", - "Show version number": "バージョンを表示", - "Did you mean %s?": "もしかして %s?", - "Arguments %s and %s are mutually exclusive" : "引数 %s と %s は同時に指定できません", - "Positionals:": "位置:", - "command": "コマンド", - "deprecated": "非推奨", - "deprecated: %s": "非推奨: %s" -} diff --git a/node_modules/yargs/locales/ko.json b/node_modules/yargs/locales/ko.json deleted file mode 100644 index 746bc89..0000000 --- a/node_modules/yargs/locales/ko.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "Commands:": "명령:", - "Options:": "옵션:", - "Examples:": "예시:", - "boolean": "불리언", - "count": "개수", - "string": "문자열", - "number": "숫자", - "array": "배열", - "required": "필수", - "default": "기본값", - "default:": "기본값:", - "choices:": "선택지:", - "aliases:": "별칭:", - "generated-value": "생성된 값", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "옵션이 아닌 인수가 충분하지 않습니다: %s개 입력받음, 최소 %s개 입력 필요", - "other": "옵션이 아닌 인수가 충분하지 않습니다: %s개 입력받음, 최소 %s개 입력 필요" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "옵션이 아닌 인수가 너무 많습니다: %s개 입력받음, 최대 %s개 입력 가능", - "other": "옵션이 아닌 인수가 너무 많습니다: %s개 입력받음, 최대 %s개 입력 가능" - }, - "Missing argument value: %s": { - "one": "인수가 주어지지 않았습니다: %s", - "other": "인수가 주어지지 않았습니다: %s" - }, - "Missing required argument: %s": { - "one": "필수 인수가 주어지지 않았습니다: %s", - "other": "필수 인수가 주어지지 않았습니다: %s" - }, - "Unknown argument: %s": { - "one": "알 수 없는 인수입니다: %s", - "other": "알 수 없는 인수입니다: %s" - }, - "Invalid values:": "유효하지 않은 값:", - "Argument: %s, Given: %s, 
Choices: %s": "인수: %s, 주어진 값: %s, 선택지: %s", - "Argument check failed: %s": "인수 체크에 실패했습니다: %s", - "Implications failed:": "주어진 인수에 필요한 추가 인수가 주어지지 않았습니다:", - "Not enough arguments following: %s": "다음 인수가 주어지지 않았습니다: %s", - "Invalid JSON config file: %s": "유효하지 않은 JSON 설정 파일: %s", - "Path to JSON config file": "JSON 설정 파일 경로", - "Show help": "도움말 표시", - "Show version number": "버전 표시", - "Did you mean %s?": "%s을(를) 찾으시나요?", - "Arguments %s and %s are mutually exclusive" : "인수 %s과(와) %s은(는) 동시에 지정할 수 없습니다", - "Positionals:": "위치:", - "command": "명령" -} diff --git a/node_modules/yargs/locales/nb.json b/node_modules/yargs/locales/nb.json deleted file mode 100644 index 6f410ed..0000000 --- a/node_modules/yargs/locales/nb.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "Commands:": "Kommandoer:", - "Options:": "Alternativer:", - "Examples:": "Eksempler:", - "boolean": "boolsk", - "count": "antall", - "string": "streng", - "number": "nummer", - "array": "matrise", - "required": "obligatorisk", - "default": "standard", - "default:": "standard:", - "choices:": "valg:", - "generated-value": "generert-verdi", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Ikke nok ikke-alternativ argumenter: fikk %s, trenger minst %s", - "other": "Ikke nok ikke-alternativ argumenter: fikk %s, trenger minst %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "For mange ikke-alternativ argumenter: fikk %s, maksimum %s", - "other": "For mange ikke-alternativ argumenter: fikk %s, maksimum %s" - }, - "Missing argument value: %s": { - "one": "Mangler argument verdi: %s", - "other": "Mangler argument verdier: %s" - }, - "Missing required argument: %s": { - "one": "Mangler obligatorisk argument: %s", - "other": "Mangler obligatoriske argumenter: %s" - }, - "Unknown argument: %s": { - "one": "Ukjent argument: %s", - "other": "Ukjente argumenter: %s" - }, - "Invalid values:": "Ugyldige verdier:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gitt: %s, Valg: %s", - "Argument check failed: %s": "Argumentsjekk mislyktes: %s", - "Implications failed:": "Konsekvensene mislyktes:", - "Not enough arguments following: %s": "Ikke nok følgende argumenter: %s", - "Invalid JSON config file: %s": "Ugyldig JSON konfigurasjonsfil: %s", - "Path to JSON config file": "Bane til JSON konfigurasjonsfil", - "Show help": "Vis hjelp", - "Show version number": "Vis versjonsnummer" -} diff --git a/node_modules/yargs/locales/nl.json b/node_modules/yargs/locales/nl.json deleted file mode 100644 index 9ff95c5..0000000 --- a/node_modules/yargs/locales/nl.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "Commands:": "Commando's:", - "Options:": "Opties:", - "Examples:": "Voorbeelden:", - "boolean": "booleaans", - "count": "aantal", - "string": "string", - "number": "getal", - "array": "lijst", - "required": "verplicht", - "default": "standaard", - "default:": "standaard:", - "choices:": "keuzes:", - "aliases:": "aliassen:", - "generated-value": "gegenereerde waarde", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Niet genoeg niet-optie-argumenten: %s gekregen, minstens %s nodig", - "other": "Niet genoeg niet-optie-argumenten: %s gekregen, minstens %s nodig" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Te veel niet-optie-argumenten: %s gekregen, maximum is %s", - "other": "Te veel niet-optie-argumenten: %s gekregen, maximum is %s" - }, - "Missing argument value: %s": { - "one": "Missende argumentwaarde: %s", - "other": "Missende argumentwaarden: %s" - 
}, - "Missing required argument: %s": { - "one": "Missend verplicht argument: %s", - "other": "Missende verplichte argumenten: %s" - }, - "Unknown argument: %s": { - "one": "Onbekend argument: %s", - "other": "Onbekende argumenten: %s" - }, - "Invalid values:": "Ongeldige waarden:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gegeven: %s, Keuzes: %s", - "Argument check failed: %s": "Argumentcontrole mislukt: %s", - "Implications failed:": "Ontbrekende afhankelijke argumenten:", - "Not enough arguments following: %s": "Niet genoeg argumenten na: %s", - "Invalid JSON config file: %s": "Ongeldig JSON-config-bestand: %s", - "Path to JSON config file": "Pad naar JSON-config-bestand", - "Show help": "Toon help", - "Show version number": "Toon versienummer", - "Did you mean %s?": "Bedoelde u misschien %s?", - "Arguments %s and %s are mutually exclusive": "Argumenten %s en %s kunnen niet tegelijk gebruikt worden", - "Positionals:": "Positie-afhankelijke argumenten", - "command": "commando" -} diff --git a/node_modules/yargs/locales/nn.json b/node_modules/yargs/locales/nn.json deleted file mode 100644 index 24479ac..0000000 --- a/node_modules/yargs/locales/nn.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "Commands:": "Kommandoar:", - "Options:": "Alternativ:", - "Examples:": "Døme:", - "boolean": "boolsk", - "count": "mengd", - "string": "streng", - "number": "nummer", - "array": "matrise", - "required": "obligatorisk", - "default": "standard", - "default:": "standard:", - "choices:": "val:", - "generated-value": "generert-verdi", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Ikkje nok ikkje-alternativ argument: fekk %s, treng minst %s", - "other": "Ikkje nok ikkje-alternativ argument: fekk %s, treng minst %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "For mange ikkje-alternativ argument: fekk %s, maksimum %s", - "other": "For mange ikkje-alternativ argument: fekk %s, maksimum %s" - }, - "Missing argument value: %s": { - "one": "Manglar argumentverdi: %s", - "other": "Manglar argumentverdiar: %s" - }, - "Missing required argument: %s": { - "one": "Manglar obligatorisk argument: %s", - "other": "Manglar obligatoriske argument: %s" - }, - "Unknown argument: %s": { - "one": "Ukjent argument: %s", - "other": "Ukjende argument: %s" - }, - "Invalid values:": "Ugyldige verdiar:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gjeve: %s, Val: %s", - "Argument check failed: %s": "Argumentsjekk mislukkast: %s", - "Implications failed:": "Konsekvensane mislukkast:", - "Not enough arguments following: %s": "Ikkje nok fylgjande argument: %s", - "Invalid JSON config file: %s": "Ugyldig JSON konfigurasjonsfil: %s", - "Path to JSON config file": "Bane til JSON konfigurasjonsfil", - "Show help": "Vis hjelp", - "Show version number": "Vis versjonsnummer" -} diff --git a/node_modules/yargs/locales/pirate.json b/node_modules/yargs/locales/pirate.json deleted file mode 100644 index dcb5cb7..0000000 --- a/node_modules/yargs/locales/pirate.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "Commands:": "Choose yer command:", - "Options:": "Options for me hearties!", - "Examples:": "Ex. 
marks the spot:", - "required": "requi-yar-ed", - "Missing required argument: %s": { - "one": "Ye be havin' to set the followin' argument land lubber: %s", - "other": "Ye be havin' to set the followin' arguments land lubber: %s" - }, - "Show help": "Parlay this here code of conduct", - "Show version number": "'Tis the version ye be askin' fer", - "Arguments %s and %s are mutually exclusive" : "Yon scurvy dogs %s and %s be as bad as rum and a prudish wench" -} diff --git a/node_modules/yargs/locales/pl.json b/node_modules/yargs/locales/pl.json deleted file mode 100644 index a41d4bd..0000000 --- a/node_modules/yargs/locales/pl.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "Commands:": "Polecenia:", - "Options:": "Opcje:", - "Examples:": "Przykłady:", - "boolean": "boolean", - "count": "ilość", - "string": "ciąg znaków", - "number": "liczba", - "array": "tablica", - "required": "wymagany", - "default": "domyślny", - "default:": "domyślny:", - "choices:": "dostępne:", - "aliases:": "aliasy:", - "generated-value": "wygenerowana-wartość", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Niewystarczająca ilość argumentów: otrzymano %s, wymagane co najmniej %s", - "other": "Niewystarczająca ilość argumentów: otrzymano %s, wymagane co najmniej %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Zbyt duża ilość argumentów: otrzymano %s, wymagane co najwyżej %s", - "other": "Zbyt duża ilość argumentów: otrzymano %s, wymagane co najwyżej %s" - }, - "Missing argument value: %s": { - "one": "Brak wartości dla argumentu: %s", - "other": "Brak wartości dla argumentów: %s" - }, - "Missing required argument: %s": { - "one": "Brak wymaganego argumentu: %s", - "other": "Brak wymaganych argumentów: %s" - }, - "Unknown argument: %s": { - "one": "Nieznany argument: %s", - "other": "Nieznane argumenty: %s" - }, - "Invalid values:": "Nieprawidłowe wartości:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Otrzymano: %s, Dostępne: %s", - "Argument check failed: %s": "Weryfikacja argumentów nie powiodła się: %s", - "Implications failed:": "Założenia nie zostały spełnione:", - "Not enough arguments following: %s": "Niewystarczająca ilość argumentów następujących po: %s", - "Invalid JSON config file: %s": "Nieprawidłowy plik konfiguracyjny JSON: %s", - "Path to JSON config file": "Ścieżka do pliku konfiguracyjnego JSON", - "Show help": "Pokaż pomoc", - "Show version number": "Pokaż numer wersji", - "Did you mean %s?": "Czy chodziło Ci o %s?", - "Arguments %s and %s are mutually exclusive": "Argumenty %s i %s wzajemnie się wykluczają", - "Positionals:": "Pozycyjne:", - "command": "polecenie" -} diff --git a/node_modules/yargs/locales/pt.json b/node_modules/yargs/locales/pt.json deleted file mode 100644 index 0c8ac99..0000000 --- a/node_modules/yargs/locales/pt.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "Commands:": "Comandos:", - "Options:": "Opções:", - "Examples:": "Exemplos:", - "boolean": "boolean", - "count": "contagem", - "string": "cadeia de caracteres", - "number": "número", - "array": "arranjo", - "required": "requerido", - "default": "padrão", - "default:": "padrão:", - "choices:": "escolhas:", - "generated-value": "valor-gerado", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Argumentos insuficientes não opcionais: Argumento %s, necessário pelo menos %s", - "other": "Argumentos insuficientes não opcionais: Argumento %s, necessário pelo menos %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": 
"Excesso de argumentos não opcionais: recebido %s, máximo de %s", - "other": "Excesso de argumentos não opcionais: recebido %s, máximo de %s" - }, - "Missing argument value: %s": { - "one": "Falta valor de argumento: %s", - "other": "Falta valores de argumento: %s" - }, - "Missing required argument: %s": { - "one": "Falta argumento obrigatório: %s", - "other": "Faltando argumentos obrigatórios: %s" - }, - "Unknown argument: %s": { - "one": "Argumento desconhecido: %s", - "other": "Argumentos desconhecidos: %s" - }, - "Invalid values:": "Valores inválidos:", - "Argument: %s, Given: %s, Choices: %s": "Argumento: %s, Dado: %s, Escolhas: %s", - "Argument check failed: %s": "Verificação de argumento falhou: %s", - "Implications failed:": "Implicações falharam:", - "Not enough arguments following: %s": "Insuficientes argumentos a seguir: %s", - "Invalid JSON config file: %s": "Arquivo de configuração em JSON esta inválido: %s", - "Path to JSON config file": "Caminho para o arquivo de configuração em JSON", - "Show help": "Mostra ajuda", - "Show version number": "Mostra número de versão", - "Arguments %s and %s are mutually exclusive" : "Argumentos %s e %s são mutualmente exclusivos" -} diff --git a/node_modules/yargs/locales/pt_BR.json b/node_modules/yargs/locales/pt_BR.json deleted file mode 100644 index eae1ec6..0000000 --- a/node_modules/yargs/locales/pt_BR.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "Commands:": "Comandos:", - "Options:": "Opções:", - "Examples:": "Exemplos:", - "boolean": "booleano", - "count": "contagem", - "string": "string", - "number": "número", - "array": "array", - "required": "obrigatório", - "default:": "padrão:", - "choices:": "opções:", - "aliases:": "sinônimos:", - "generated-value": "valor-gerado", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Argumentos insuficientes: Argumento %s, necessário pelo menos %s", - "other": "Argumentos insuficientes: Argumento %s, necessário pelo menos %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Excesso de argumentos: recebido %s, máximo de %s", - "other": "Excesso de argumentos: recebido %s, máximo de %s" - }, - "Missing argument value: %s": { - "one": "Falta valor de argumento: %s", - "other": "Falta valores de argumento: %s" - }, - "Missing required argument: %s": { - "one": "Falta argumento obrigatório: %s", - "other": "Faltando argumentos obrigatórios: %s" - }, - "Unknown argument: %s": { - "one": "Argumento desconhecido: %s", - "other": "Argumentos desconhecidos: %s" - }, - "Invalid values:": "Valores inválidos:", - "Argument: %s, Given: %s, Choices: %s": "Argumento: %s, Dado: %s, Opções: %s", - "Argument check failed: %s": "Verificação de argumento falhou: %s", - "Implications failed:": "Implicações falharam:", - "Not enough arguments following: %s": "Argumentos insuficientes a seguir: %s", - "Invalid JSON config file: %s": "Arquivo JSON de configuração inválido: %s", - "Path to JSON config file": "Caminho para o arquivo JSON de configuração", - "Show help": "Exibe ajuda", - "Show version number": "Exibe a versão", - "Did you mean %s?": "Você quis dizer %s?", - "Arguments %s and %s are mutually exclusive" : "Argumentos %s e %s são mutualmente exclusivos", - "Positionals:": "Posicionais:", - "command": "comando" -} diff --git a/node_modules/yargs/locales/ru.json b/node_modules/yargs/locales/ru.json deleted file mode 100644 index d5c9e32..0000000 --- a/node_modules/yargs/locales/ru.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Commands:": "Команды:", - "Options:": 
"Опции:", - "Examples:": "Примеры:", - "boolean": "булевый тип", - "count": "подсчет", - "string": "строковой тип", - "number": "число", - "array": "массив", - "required": "необходимо", - "default": "по умолчанию", - "default:": "по умолчанию:", - "choices:": "возможности:", - "aliases:": "алиасы:", - "generated-value": "генерированное значение", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Недостаточно неопционных аргументов: есть %s, нужно как минимум %s", - "other": "Недостаточно неопционных аргументов: есть %s, нужно как минимум %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Слишком много неопционных аргументов: есть %s, максимум допустимо %s", - "other": "Слишком много неопционных аргументов: есть %s, максимум допустимо %s" - }, - "Missing argument value: %s": { - "one": "Не хватает значения аргумента: %s", - "other": "Не хватает значений аргументов: %s" - }, - "Missing required argument: %s": { - "one": "Не хватает необходимого аргумента: %s", - "other": "Не хватает необходимых аргументов: %s" - }, - "Unknown argument: %s": { - "one": "Неизвестный аргумент: %s", - "other": "Неизвестные аргументы: %s" - }, - "Invalid values:": "Недействительные значения:", - "Argument: %s, Given: %s, Choices: %s": "Аргумент: %s, Данное значение: %s, Возможности: %s", - "Argument check failed: %s": "Проверка аргументов не удалась: %s", - "Implications failed:": "Данный аргумент требует следующий дополнительный аргумент:", - "Not enough arguments following: %s": "Недостаточно следующих аргументов: %s", - "Invalid JSON config file: %s": "Недействительный файл конфигурации JSON: %s", - "Path to JSON config file": "Путь к файлу конфигурации JSON", - "Show help": "Показать помощь", - "Show version number": "Показать номер версии", - "Did you mean %s?": "Вы имели в виду %s?", - "Arguments %s and %s are mutually exclusive": "Аргументы %s и %s являются взаимоисключающими", - "Positionals:": "Позиционные аргументы:", - "command": "команда", - "deprecated": "устар.", - "deprecated: %s": "устар.: %s" -} diff --git a/node_modules/yargs/locales/th.json b/node_modules/yargs/locales/th.json deleted file mode 100644 index 33b048e..0000000 --- a/node_modules/yargs/locales/th.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "Commands:": "คอมมาน", - "Options:": "ออฟชั่น", - "Examples:": "ตัวอย่าง", - "boolean": "บูลีน", - "count": "นับ", - "string": "สตริง", - "number": "ตัวเลข", - "array": "อาเรย์", - "required": "จำเป็น", - "default": "ค่าเริ่มต้", - "default:": "ค่าเริ่มต้น", - "choices:": "ตัวเลือก", - "aliases:": "เอเลียส", - "generated-value": "ค่าที่ถูกสร้างขึ้น", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "ใส่อาร์กิวเมนต์ไม่ครบตามจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการอย่างน้อย %s ค่า", - "other": "ใส่อาร์กิวเมนต์ไม่ครบตามจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการอย่างน้อย %s ค่า" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "ใส่อาร์กิวเมนต์เกินจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการมากที่สุด %s ค่า", - "other": "ใส่อาร์กิวเมนต์เกินจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการมากที่สุด %s ค่า" - }, - "Missing argument value: %s": { - "one": "ค่าอาร์กิวเมนต์ที่ขาดไป: %s", - "other": "ค่าอาร์กิวเมนต์ที่ขาดไป: %s" - }, - "Missing required argument: %s": { - "one": "อาร์กิวเมนต์จำเป็นที่ขาดไป: %s", - "other": "อาร์กิวเมนต์จำเป็นที่ขาดไป: %s" - }, - "Unknown argument: %s": { - "one": "อาร์กิวเมนต์ที่ไม่รู้จัก: %s", - "other": "อาร์กิวเมนต์ที่ไม่รู้จัก: %s" - }, - "Invalid values:": 
"ค่าไม่ถูกต้อง:", - "Argument: %s, Given: %s, Choices: %s": "อาร์กิวเมนต์: %s, ได้รับ: %s, ตัวเลือก: %s", - "Argument check failed: %s": "ตรวจสอบพบอาร์กิวเมนต์ที่ไม่ถูกต้อง: %s", - "Implications failed:": "Implications ไม่สำเร็จ:", - "Not enough arguments following: %s": "ใส่อาร์กิวเมนต์ไม่ครบ: %s", - "Invalid JSON config file: %s": "ไฟล์คอนฟิค JSON ไม่ถูกต้อง: %s", - "Path to JSON config file": "พาทไฟล์คอนฟิค JSON", - "Show help": "ขอความช่วยเหลือ", - "Show version number": "แสดงตัวเลขเวอร์ชั่น", - "Did you mean %s?": "คุณหมายถึง %s?" -} diff --git a/node_modules/yargs/locales/tr.json b/node_modules/yargs/locales/tr.json deleted file mode 100644 index 0d0d2cc..0000000 --- a/node_modules/yargs/locales/tr.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "Commands:": "Komutlar:", - "Options:": "Seçenekler:", - "Examples:": "Örnekler:", - "boolean": "boolean", - "count": "sayı", - "string": "string", - "number": "numara", - "array": "array", - "required": "zorunlu", - "default": "varsayılan", - "default:": "varsayılan:", - "choices:": "seçimler:", - "aliases:": "takma adlar:", - "generated-value": "oluşturulan-değer", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Seçenek dışı argümanlar yetersiz: %s bulundu, %s gerekli", - "other": "Seçenek dışı argümanlar yetersiz: %s bulundu, %s gerekli" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Seçenek dışı argümanlar gereğinden fazla: %s bulundu, azami %s", - "other": "Seçenek dışı argümanlar gereğinden fazla: %s bulundu, azami %s" - }, - "Missing argument value: %s": { - "one": "Eksik argüman değeri: %s", - "other": "Eksik argüman değerleri: %s" - }, - "Missing required argument: %s": { - "one": "Eksik zorunlu argüman: %s", - "other": "Eksik zorunlu argümanlar: %s" - }, - "Unknown argument: %s": { - "one": "Bilinmeyen argüman: %s", - "other": "Bilinmeyen argümanlar: %s" - }, - "Invalid values:": "Geçersiz değerler:", - "Argument: %s, Given: %s, Choices: %s": "Argüman: %s, Verilen: %s, Seçimler: %s", - "Argument check failed: %s": "Argüman kontrolü başarısız oldu: %s", - "Implications failed:": "Sonuçlar başarısız oldu:", - "Not enough arguments following: %s": "%s için yeterli argüman bulunamadı", - "Invalid JSON config file: %s": "Geçersiz JSON yapılandırma dosyası: %s", - "Path to JSON config file": "JSON yapılandırma dosya konumu", - "Show help": "Yardım detaylarını göster", - "Show version number": "Versiyon detaylarını göster", - "Did you mean %s?": "Bunu mu demek istediniz: %s?", - "Positionals:": "Sıralılar:", - "command": "komut" -} diff --git a/node_modules/yargs/locales/uk_UA.json b/node_modules/yargs/locales/uk_UA.json deleted file mode 100644 index 0af0e99..0000000 --- a/node_modules/yargs/locales/uk_UA.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Commands:": "Команди:", - "Options:": "Опції:", - "Examples:": "Приклади:", - "boolean": "boolean", - "count": "кількість", - "string": "строка", - "number": "число", - "array": "масива", - "required": "обов'язково", - "default": "за замовчуванням", - "default:": "за замовчуванням:", - "choices:": "доступні варіанти:", - "aliases:": "псевдоніми:", - "generated-value": "згенероване значення", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "Недостатньо аргументів: наразі %s, потрібно %s або більше", - "other": "Недостатньо аргументів: наразі %s, потрібно %s або більше" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "Забагато аргументів: наразі %s, максимум %s", - "other": "Too many non-option 
arguments: наразі %s, максимум of %s" - }, - "Missing argument value: %s": { - "one": "Відсутнє значення для аргументу: %s", - "other": "Відсутні значення для аргументу: %s" - }, - "Missing required argument: %s": { - "one": "Відсутній обов'язковий аргумент: %s", - "other": "Відсутні обов'язкові аргументи: %s" - }, - "Unknown argument: %s": { - "one": "Аргумент %s не підтримується", - "other": "Аргументи %s не підтримуються" - }, - "Invalid values:": "Некоректні значення:", - "Argument: %s, Given: %s, Choices: %s": "Аргумент: %s, Введено: %s, Доступні варіанти: %s", - "Argument check failed: %s": "Аргумент не пройшов перевірку: %s", - "Implications failed:": "Відсутні залежні аргументи:", - "Not enough arguments following: %s": "Не достатньо аргументів після: %s", - "Invalid JSON config file: %s": "Некоректний JSON-файл конфігурації: %s", - "Path to JSON config file": "Шлях до JSON-файлу конфігурації", - "Show help": "Показати довідку", - "Show version number": "Показати версію", - "Did you mean %s?": "Можливо, ви мали на увазі %s?", - "Arguments %s and %s are mutually exclusive" : "Аргументи %s та %s взаємовиключні", - "Positionals:": "Позиційні:", - "command": "команда", - "deprecated": "застарілий", - "deprecated: %s": "застарілий: %s" -} diff --git a/node_modules/yargs/locales/uz.json b/node_modules/yargs/locales/uz.json deleted file mode 100644 index 0d07168..0000000 --- a/node_modules/yargs/locales/uz.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "Commands:": "Buyruqlar:", - "Options:": "Imkoniyatlar:", - "Examples:": "Misollar:", - "boolean": "boolean", - "count": "sanoq", - "string": "satr", - "number": "raqam", - "array": "massiv", - "required": "majburiy", - "default": "boshlang'ich", - "default:": "boshlang'ich:", - "choices:": "tanlovlar:", - "aliases:": "taxalluslar:", - "generated-value": "yaratilgan-qiymat", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "No-imkoniyat argumentlar yetarli emas: berilgan %s, minimum %s", - "other": "No-imkoniyat argumentlar yetarli emas: berilgan %s, minimum %s" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "No-imkoniyat argumentlar juda ko'p: berilgan %s, maksimum %s", - "other": "No-imkoniyat argumentlar juda ko'p: got %s, maksimum %s" - }, - "Missing argument value: %s": { - "one": "Argument qiymati berilmagan: %s", - "other": "Argument qiymatlari berilmagan: %s" - }, - "Missing required argument: %s": { - "one": "Majburiy argument berilmagan: %s", - "other": "Majburiy argumentlar berilmagan: %s" - }, - "Unknown argument: %s": { - "one": "Noma'lum argument berilmagan: %s", - "other": "Noma'lum argumentlar berilmagan: %s" - }, - "Invalid values:": "Nosoz qiymatlar:", - "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Berilgan: %s, Tanlovlar: %s", - "Argument check failed: %s": "Muvaffaqiyatsiz argument tekshiruvi: %s", - "Implications failed:": "Bog'liq argumentlar berilmagan:", - "Not enough arguments following: %s": "Quyidagi argumentlar yetarli emas: %s", - "Invalid JSON config file: %s": "Nosoz JSON konfiguratsiya fayli: %s", - "Path to JSON config file": "JSON konfiguratsiya fayli joylashuvi", - "Show help": "Yordam ko'rsatish", - "Show version number": "Versiyani ko'rsatish", - "Did you mean %s?": "%s ni nazarda tutyapsizmi?", - "Arguments %s and %s are mutually exclusive" : "%s va %s argumentlari alohida", - "Positionals:": "Positsionallar:", - "command": "buyruq", - "deprecated": "eskirgan", - "deprecated: %s": "eskirgan: %s" - } - \ No newline at end of file diff --git 
a/node_modules/yargs/locales/zh_CN.json b/node_modules/yargs/locales/zh_CN.json deleted file mode 100644 index 257d26b..0000000 --- a/node_modules/yargs/locales/zh_CN.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "Commands:": "命令:", - "Options:": "选项:", - "Examples:": "示例:", - "boolean": "布尔", - "count": "计数", - "string": "字符串", - "number": "数字", - "array": "数组", - "required": "必需", - "default": "默认值", - "default:": "默认值:", - "choices:": "可选值:", - "generated-value": "生成的值", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "缺少 non-option 参数:传入了 %s 个, 至少需要 %s 个", - "other": "缺少 non-option 参数:传入了 %s 个, 至少需要 %s 个" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "non-option 参数过多:传入了 %s 个, 最大允许 %s 个", - "other": "non-option 参数过多:传入了 %s 个, 最大允许 %s 个" - }, - "Missing argument value: %s": { - "one": "没有给此选项指定值:%s", - "other": "没有给这些选项指定值:%s" - }, - "Missing required argument: %s": { - "one": "缺少必须的选项:%s", - "other": "缺少这些必须的选项:%s" - }, - "Unknown argument: %s": { - "one": "无法识别的选项:%s", - "other": "无法识别这些选项:%s" - }, - "Invalid values:": "无效的选项值:", - "Argument: %s, Given: %s, Choices: %s": "选项名称: %s, 传入的值: %s, 可选的值:%s", - "Argument check failed: %s": "选项值验证失败:%s", - "Implications failed:": "缺少依赖的选项:", - "Not enough arguments following: %s": "没有提供足够的值给此选项:%s", - "Invalid JSON config file: %s": "无效的 JSON 配置文件:%s", - "Path to JSON config file": "JSON 配置文件的路径", - "Show help": "显示帮助信息", - "Show version number": "显示版本号", - "Did you mean %s?": "是指 %s?", - "Arguments %s and %s are mutually exclusive" : "选项 %s 和 %s 是互斥的", - "Positionals:": "位置:", - "command": "命令" -} diff --git a/node_modules/yargs/locales/zh_TW.json b/node_modules/yargs/locales/zh_TW.json deleted file mode 100644 index e38495d..0000000 --- a/node_modules/yargs/locales/zh_TW.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Commands:": "命令:", - "Options:": "選項:", - "Examples:": "範例:", - "boolean": "布林", - "count": "次數", - "string": "字串", - "number": "數字", - "array": "陣列", - "required": "必填", - "default": "預設值", - "default:": "預設值:", - "choices:": "可選值:", - "aliases:": "別名:", - "generated-value": "生成的值", - "Not enough non-option arguments: got %s, need at least %s": { - "one": "non-option 引數不足:只傳入了 %s 個, 至少要 %s 個", - "other": "non-option 引數不足:只傳入了 %s 個, 至少要 %s 個" - }, - "Too many non-option arguments: got %s, maximum of %s": { - "one": "non-option 引數過多:傳入了 %s 個, 但最多 %s 個", - "other": "non-option 引數過多:傳入了 %s 個, 但最多 %s 個" - }, - "Missing argument value: %s": { - "one": "此引數無指定值:%s", - "other": "這些引數無指定值:%s" - }, - "Missing required argument: %s": { - "one": "缺少必須的引數:%s", - "other": "缺少這些必須的引數:%s" - }, - "Unknown argument: %s": { - "one": "未知的引數:%s", - "other": "未知的引數:%s" - }, - "Invalid values:": "無效的選項值:", - "Argument: %s, Given: %s, Choices: %s": "引數名稱: %s, 傳入的值: %s, 可選的值:%s", - "Argument check failed: %s": "引數驗證失敗:%s", - "Implications failed:": "缺少依賴引數:", - "Not enough arguments following: %s": "沒有提供足夠的值給此引數:%s", - "Invalid JSON config file: %s": "無效的 JSON 設置文件:%s", - "Path to JSON config file": "JSON 設置文件的路徑", - "Show help": "顯示說明", - "Show version number": "顯示版本", - "Did you mean %s?": "您是指 %s 嗎?", - "Arguments %s and %s are mutually exclusive" : "引數 %s 和 %s 互斥", - "Positionals:": "位置:", - "command": "命令", - "deprecated": "已淘汰", - "deprecated: %s": "已淘汰:%s" - } diff --git a/node_modules/yargs/package.json b/node_modules/yargs/package.json deleted file mode 100644 index 389cc6b..0000000 --- a/node_modules/yargs/package.json +++ /dev/null @@ -1,123 +0,0 @@ -{ - "name": "yargs", - "version": "17.7.2", - 
"description": "yargs the modern, pirate-themed, successor to optimist.", - "main": "./index.cjs", - "exports": { - "./package.json": "./package.json", - ".": [ - { - "import": "./index.mjs", - "require": "./index.cjs" - }, - "./index.cjs" - ], - "./helpers": { - "import": "./helpers/helpers.mjs", - "require": "./helpers/index.js" - }, - "./browser": { - "import": "./browser.mjs", - "types": "./browser.d.ts" - }, - "./yargs": [ - { - "import": "./yargs.mjs", - "require": "./yargs" - }, - "./yargs" - ] - }, - "type": "module", - "module": "./index.mjs", - "contributors": [ - { - "name": "Yargs Contributors", - "url": "https://github.com/yargs/yargs/graphs/contributors" - } - ], - "files": [ - "browser.mjs", - "browser.d.ts", - "index.cjs", - "helpers/*.js", - "helpers/*", - "index.mjs", - "yargs", - "yargs.mjs", - "build", - "locales", - "LICENSE", - "lib/platform-shims/*.mjs", - "!*.d.ts", - "!**/*.d.ts" - ], - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "devDependencies": { - "@types/chai": "^4.2.11", - "@types/mocha": "^9.0.0", - "@types/node": "^18.0.0", - "c8": "^7.7.0", - "chai": "^4.2.0", - "chalk": "^4.0.0", - "coveralls": "^3.0.9", - "cpr": "^3.0.1", - "cross-env": "^7.0.2", - "cross-spawn": "^7.0.0", - "eslint": "^7.23.0", - "gts": "^3.0.0", - "hashish": "0.0.4", - "mocha": "^9.0.0", - "rimraf": "^3.0.2", - "rollup": "^2.23.0", - "rollup-plugin-cleanup": "^3.1.1", - "rollup-plugin-terser": "^7.0.2", - "rollup-plugin-ts": "^2.0.4", - "typescript": "^4.0.2", - "which": "^2.0.0", - "yargs-test-extends": "^1.0.1" - }, - "scripts": { - "fix": "gts fix && npm run fix:js", - "fix:js": "eslint . --ext cjs --ext mjs --ext js --fix", - "posttest": "npm run check", - "test": "c8 mocha --enable-source-maps ./test/*.cjs --require ./test/before.cjs --timeout=12000 --check-leaks", - "test:esm": "c8 mocha --enable-source-maps ./test/esm/*.mjs --check-leaks", - "coverage": "c8 report --check-coverage", - "prepare": "npm run compile", - "pretest": "npm run compile -- -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs", - "compile": "rimraf build && tsc", - "postcompile": "npm run build:cjs", - "build:cjs": "rollup -c rollup.config.cjs", - "postbuild:cjs": "rimraf ./build/index.cjs.d.ts", - "check": "gts lint && npm run check:js", - "check:js": "eslint . 
--ext cjs --ext mjs --ext js", - "clean": "gts clean" - }, - "repository": { - "type": "git", - "url": "https://github.com/yargs/yargs.git" - }, - "homepage": "https://yargs.js.org/", - "keywords": [ - "argument", - "args", - "option", - "parser", - "parsing", - "cli", - "command" - ], - "license": "MIT", - "engines": { - "node": ">=12" - } -} diff --git a/node_modules/yargs/yargs b/node_modules/yargs/yargs deleted file mode 100644 index 8460d10..0000000 --- a/node_modules/yargs/yargs +++ /dev/null @@ -1,9 +0,0 @@ -// TODO: consolidate on using a helpers file at some point in the future, which -// is the approach currently used to export Parser and applyExtends for ESM: -const {applyExtends, cjsPlatformShim, Parser, Yargs, processArgv} = require('./build/index.cjs') -Yargs.applyExtends = (config, cwd, mergeExtends) => { - return applyExtends(config, cwd, mergeExtends, cjsPlatformShim) -} -Yargs.hideBin = processArgv.hideBin -Yargs.Parser = Parser -module.exports = Yargs diff --git a/node_modules/yargs/yargs.mjs b/node_modules/yargs/yargs.mjs deleted file mode 100644 index 6d9f390..0000000 --- a/node_modules/yargs/yargs.mjs +++ /dev/null @@ -1,10 +0,0 @@ -// TODO: consolidate on using a helpers file at some point in the future, which -// is the approach currently used to export Parser and applyExtends for ESM: -import pkg from './build/index.cjs'; -const {applyExtends, cjsPlatformShim, Parser, processArgv, Yargs} = pkg; -Yargs.applyExtends = (config, cwd, mergeExtends) => { - return applyExtends(config, cwd, mergeExtends, cjsPlatformShim); -}; -Yargs.hideBin = processArgv.hideBin; -Yargs.Parser = Parser; -export default Yargs;