From 3ceeb574c3a494fcea8a6674ab906925c413d50b Mon Sep 17 00:00:00 2001 From: npm-robot Date: Thu, 28 Apr 2022 18:41:15 +0530 Subject: [PATCH] deps: upgrade npm to 8.8.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/42886 Reviewed-By: Rich Trott Reviewed-By: Mohammed Keyvanzadeh Reviewed-By: Mestery Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen --- deps/npm/bin/npx | 3 + deps/npm/docs/content/commands/npm-audit.md | 12 + deps/npm/docs/content/commands/npm-ci.md | 5 +- deps/npm/docs/content/commands/npm-dedupe.md | 12 + .../docs/content/commands/npm-find-dupes.md | 12 + .../docs/content/commands/npm-install-test.md | 12 + deps/npm/docs/content/commands/npm-install.md | 16 +- deps/npm/docs/content/commands/npm-link.md | 12 + deps/npm/docs/content/commands/npm-ls.md | 12 + deps/npm/docs/content/commands/npm-prune.md | 12 + deps/npm/docs/content/commands/npm-rebuild.md | 12 + .../docs/content/commands/npm-uninstall.md | 12 + deps/npm/docs/content/commands/npm-update.md | 12 + deps/npm/docs/content/using-npm/config.md | 14 + deps/npm/docs/content/using-npm/scope.md | 20 +- deps/npm/docs/output/commands/npm-audit.html | 12 +- deps/npm/docs/output/commands/npm-ci.html | 5 +- deps/npm/docs/output/commands/npm-dedupe.html | 12 +- .../docs/output/commands/npm-find-dupes.html | 12 +- .../output/commands/npm-install-test.html | 12 +- .../npm/docs/output/commands/npm-install.html | 16 +- deps/npm/docs/output/commands/npm-link.html | 12 +- deps/npm/docs/output/commands/npm-ls.html | 14 +- deps/npm/docs/output/commands/npm-prune.html | 12 +- .../npm/docs/output/commands/npm-rebuild.html | 12 +- .../docs/output/commands/npm-uninstall.html | 12 +- deps/npm/docs/output/commands/npm-update.html | 12 +- deps/npm/docs/output/commands/npm.html | 2 +- deps/npm/docs/output/using-npm/config.html | 16 +- deps/npm/docs/output/using-npm/scope.html | 19 +- deps/npm/lib/arborist-cmd.js | 1 + deps/npm/lib/commands/deprecate.js | 2 +- deps/npm/lib/commands/diff.js | 11 +- deps/npm/lib/commands/dist-tag.js | 9 +- deps/npm/lib/commands/exec.js | 8 +- deps/npm/lib/commands/help-search.js | 4 +- deps/npm/lib/commands/help.js | 5 +- deps/npm/lib/commands/install.js | 6 + deps/npm/lib/commands/owner.js | 64 +- deps/npm/lib/commands/publish.js | 31 +- deps/npm/lib/utils/config/definitions.js | 13 + deps/npm/lib/utils/log-file.js | 5 +- deps/npm/man/man1/npm-audit.1 | 12 + deps/npm/man/man1/npm-ci.1 | 5 +- deps/npm/man/man1/npm-dedupe.1 | 12 + deps/npm/man/man1/npm-find-dupes.1 | 12 + deps/npm/man/man1/npm-install-test.1 | 12 + deps/npm/man/man1/npm-install.1 | 16 +- deps/npm/man/man1/npm-link.1 | 12 + deps/npm/man/man1/npm-ls.1 | 14 +- deps/npm/man/man1/npm-prune.1 | 12 + deps/npm/man/man1/npm-rebuild.1 | 12 + deps/npm/man/man1/npm-uninstall.1 | 12 + deps/npm/man/man1/npm-update.1 | 12 + deps/npm/man/man1/npm.1 | 2 +- deps/npm/man/man7/config.7 | 14 + deps/npm/man/man7/scope.7 | 24 +- .../node_modules/{ => @colors}/colors/LICENSE | 1 + .../colors/examples/normal-usage.js | 2 +- .../colors/examples/safe-string.js | 3 +- .../{ => @colors}/colors/index.d.ts | 4 +- .../{ => @colors}/colors/lib/colors.js | 2 +- .../{ => @colors}/colors/lib/custom/trap.js | 0 .../{ => @colors}/colors/lib/custom/zalgo.js | 1 - .../colors/lib/extendStringPrototype.js | 0 .../{ => @colors}/colors/lib/index.js | 0 .../{ => @colors}/colors/lib/maps/america.js | 0 .../{ => @colors}/colors/lib/maps/rainbow.js | 1 - .../{ => @colors}/colors/lib/maps/random.js | 0 
.../{ => @colors}/colors/lib/maps/zebra.js | 0 .../{ => @colors}/colors/lib/styles.js | 0 .../colors/lib/system/has-flag.js | 0 .../colors/lib/system/supports-colors.js | 0 .../{ => @colors}/colors/package.json | 14 +- .../{ => @colors}/colors/safe.d.ts | 0 .../node_modules/{ => @colors}/colors/safe.js | 0 .../colors/themes/generic-logging.js | 0 .../arborist/lib/arborist/build-ideal-tree.js | 22 +- .../arborist/lib/arborist/load-actual.js | 1 + .../arborist/lib/arborist/load-virtual.js | 2 + .../@npmcli/arborist/lib/dep-valid.js | 16 +- .../node_modules/@npmcli/arborist/lib/node.js | 5 + .../@npmcli/arborist/lib/place-dep.js | 3 + .../@npmcli/arborist/package.json | 14 +- .../@npmcli/map-workspaces/package.json | 30 +- deps/npm/node_modules/cacache/lib/verify.js | 4 +- deps/npm/node_modules/cacache/package.json | 8 +- deps/npm/node_modules/cli-table3/index.d.ts | 1 + deps/npm/node_modules/cli-table3/package.json | 8 +- deps/npm/node_modules/cli-table3/src/cell.js | 63 +- deps/npm/node_modules/cli-table3/src/debug.js | 28 + .../cli-table3/src/layout-manager.js | 57 +- deps/npm/node_modules/cli-table3/src/table.js | 33 +- deps/npm/node_modules/cli-table3/src/utils.js | 38 +- deps/npm/node_modules/glob/LICENSE | 8 +- deps/npm/node_modules/glob/common.js | 2 + deps/npm/node_modules/glob/glob.js | 5 +- deps/npm/node_modules/glob/package.json | 12 +- deps/npm/node_modules/glob/sync.js | 5 +- .../node_modules/libnpmaccess/package.json | 4 +- deps/npm/node_modules/libnpmdiff/package.json | 4 +- deps/npm/node_modules/libnpmexec/lib/index.js | 73 +- .../libnpmexec/lib/manifest-missing.js | 19 - deps/npm/node_modules/libnpmexec/package.json | 8 +- deps/npm/node_modules/libnpmfund/package.json | 4 +- deps/npm/node_modules/libnpmhook/package.json | 4 +- deps/npm/node_modules/libnpmorg/package.json | 4 +- deps/npm/node_modules/libnpmpack/package.json | 4 +- .../libnpmpublish/lib/unpublish.js | 21 +- .../node_modules/libnpmpublish/package.json | 8 +- .../node_modules/libnpmsearch/package.json | 4 +- deps/npm/node_modules/libnpmteam/package.json | 4 +- .../node_modules/libnpmversion/package.json | 8 +- deps/npm/node_modules/lru-cache/index.js | 57 +- deps/npm/node_modules/lru-cache/package.json | 8 +- .../node_modules/brace-expansion/LICENSE | 0 .../node_modules/brace-expansion/index.js | 0 .../node_modules/brace-expansion/package.json | 0 .../node-gyp/node_modules/glob/LICENSE | 21 + .../node-gyp/node_modules/glob/common.js | 236 +++ .../node-gyp/node_modules/glob/glob.js | 787 ++++++++++ .../node-gyp/node_modules/glob/package.json | 52 + .../node-gyp/node_modules/glob/sync.js | 483 ++++++ .../node_modules/minimatch/LICENSE | 0 .../node_modules/minimatch/minimatch.js | 0 .../node_modules/minimatch/package.json | 0 .../node_modules/npm-packlist/lib/index.js | 3 +- .../node_modules/npm-packlist/package.json | 8 +- .../npm/node_modules/npm-profile/package.json | 26 +- .../npm-registry-fetch/lib/errors.js | 2 +- .../npm-registry-fetch/package.json | 10 +- deps/npm/node_modules/npmlog/package.json | 24 +- .../read-package-json/lib/read-json.js | 46 +- .../read-package-json/package.json | 26 +- .../node_modules/brace-expansion/LICENSE | 21 + .../node_modules/brace-expansion/index.js | 200 +++ .../node_modules/brace-expansion/package.json | 47 + .../rimraf/node_modules/glob/LICENSE | 21 + .../rimraf/node_modules/glob/common.js | 236 +++ .../rimraf/node_modules/glob/glob.js | 787 ++++++++++ .../rimraf/node_modules/glob/package.json | 52 + .../rimraf/node_modules/glob/sync.js | 483 ++++++ 
.../rimraf/node_modules/minimatch/LICENSE | 15 + .../node_modules/minimatch/minimatch.js | 947 ++++++++++++ .../node_modules/minimatch/package.json | 33 + deps/npm/node_modules/semver/bin/semver.js | 3 +- .../npm/node_modules/semver/classes/semver.js | 2 +- deps/npm/node_modules/semver/functions/inc.js | 5 +- .../semver/node_modules/lru-cache/LICENSE | 15 + .../semver/node_modules/lru-cache/index.js | 334 +++++ .../node_modules/lru-cache/package.json | 34 + deps/npm/node_modules/semver/package.json | 11 +- deps/npm/package.json | 24 +- .../test/lib/commands/audit.js.test.cjs | 27 +- .../test/lib/commands/config.js.test.cjs | 2 + .../test/lib/commands/ls.js.test.cjs | 32 - .../test/lib/commands/owner.js.test.cjs | 20 - .../test/lib/commands/publish.js.test.cjs | 336 +++-- .../test/lib/load-all-commands.js.test.cjs | 24 +- .../lib/utils/config/definitions.js.test.cjs | 14 + .../lib/utils/config/describe-all.js.test.cjs | 14 + .../test/lib/utils/npm-usage.js.test.cjs | 24 +- deps/npm/test/fixtures/mock-npm.js | 2 +- deps/npm/test/fixtures/mock-registry.js | 41 +- deps/npm/test/lib/commands/audit.js | 90 +- deps/npm/test/lib/commands/deprecate.js | 177 ++- deps/npm/test/lib/commands/diff.js | 2 - deps/npm/test/lib/commands/exec.js | 435 ++++-- deps/npm/test/lib/commands/install.js | 17 + deps/npm/test/lib/commands/owner.js | 994 +++++-------- deps/npm/test/lib/commands/publish.js | 1321 +++++++---------- deps/npm/test/lib/commands/unpublish.js | 38 +- deps/npm/test/lib/utils/config/definitions.js | 3 +- deps/npm/test/lib/utils/read-package-name.js | 33 - 174 files changed, 7767 insertions(+), 2200 deletions(-) rename deps/npm/node_modules/{ => @colors}/colors/LICENSE (96%) rename deps/npm/node_modules/{ => @colors}/colors/examples/normal-usage.js (98%) rename deps/npm/node_modules/{ => @colors}/colors/examples/safe-string.js (98%) rename deps/npm/node_modules/{ => @colors}/colors/index.d.ts (96%) rename deps/npm/node_modules/{ => @colors}/colors/lib/colors.js (99%) rename deps/npm/node_modules/{ => @colors}/colors/lib/custom/trap.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/custom/zalgo.js (99%) rename deps/npm/node_modules/{ => @colors}/colors/lib/extendStringPrototype.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/index.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/maps/america.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/maps/rainbow.js (99%) rename deps/npm/node_modules/{ => @colors}/colors/lib/maps/random.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/maps/zebra.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/styles.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/system/has-flag.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/lib/system/supports-colors.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/package.json (68%) rename deps/npm/node_modules/{ => @colors}/colors/safe.d.ts (100%) rename deps/npm/node_modules/{ => @colors}/colors/safe.js (100%) rename deps/npm/node_modules/{ => @colors}/colors/themes/generic-logging.js (100%) create mode 100644 deps/npm/node_modules/cli-table3/src/debug.js delete mode 100644 deps/npm/node_modules/libnpmexec/lib/manifest-missing.js rename deps/npm/node_modules/{glob => node-gyp}/node_modules/brace-expansion/LICENSE (100%) rename deps/npm/node_modules/{glob => node-gyp}/node_modules/brace-expansion/index.js (100%) rename deps/npm/node_modules/{glob => node-gyp}/node_modules/brace-expansion/package.json (100%) 
create mode 100644 deps/npm/node_modules/node-gyp/node_modules/glob/LICENSE create mode 100644 deps/npm/node_modules/node-gyp/node_modules/glob/common.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/glob/glob.js create mode 100644 deps/npm/node_modules/node-gyp/node_modules/glob/package.json create mode 100644 deps/npm/node_modules/node-gyp/node_modules/glob/sync.js rename deps/npm/node_modules/{glob => node-gyp}/node_modules/minimatch/LICENSE (100%) rename deps/npm/node_modules/{glob => node-gyp}/node_modules/minimatch/minimatch.js (100%) rename deps/npm/node_modules/{glob => node-gyp}/node_modules/minimatch/package.json (100%) create mode 100644 deps/npm/node_modules/rimraf/node_modules/brace-expansion/LICENSE create mode 100644 deps/npm/node_modules/rimraf/node_modules/brace-expansion/index.js create mode 100644 deps/npm/node_modules/rimraf/node_modules/brace-expansion/package.json create mode 100644 deps/npm/node_modules/rimraf/node_modules/glob/LICENSE create mode 100644 deps/npm/node_modules/rimraf/node_modules/glob/common.js create mode 100644 deps/npm/node_modules/rimraf/node_modules/glob/glob.js create mode 100644 deps/npm/node_modules/rimraf/node_modules/glob/package.json create mode 100644 deps/npm/node_modules/rimraf/node_modules/glob/sync.js create mode 100644 deps/npm/node_modules/rimraf/node_modules/minimatch/LICENSE create mode 100644 deps/npm/node_modules/rimraf/node_modules/minimatch/minimatch.js create mode 100644 deps/npm/node_modules/rimraf/node_modules/minimatch/package.json create mode 100644 deps/npm/node_modules/semver/node_modules/lru-cache/LICENSE create mode 100644 deps/npm/node_modules/semver/node_modules/lru-cache/index.js create mode 100644 deps/npm/node_modules/semver/node_modules/lru-cache/package.json delete mode 100644 deps/npm/tap-snapshots/test/lib/commands/owner.js.test.cjs delete mode 100644 deps/npm/test/lib/utils/read-package-name.js diff --git a/deps/npm/bin/npx b/deps/npm/bin/npx index 4b58a104b9e421..a34e3459b5a701 100755 --- a/deps/npm/bin/npx +++ b/deps/npm/bin/npx @@ -12,6 +12,9 @@ case `uname` in esac NODE_EXE="$basedir/node.exe" +if ! [ -x "$NODE_EXE" ]; then + NODE_EXE="$basedir/node" +fi if ! [ -x "$NODE_EXE" ]; then NODE_EXE=node fi diff --git a/deps/npm/docs/content/commands/npm-audit.md b/deps/npm/docs/content/commands/npm-audit.md index 0f164ac9d3ec51..9d09a4107fdc7d 100644 --- a/deps/npm/docs/content/commands/npm-audit.md +++ b/deps/npm/docs/content/commands/npm-audit.md @@ -399,6 +399,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-ci.md b/deps/npm/docs/content/commands/npm-ci.md index b4ce811869bb2e..2bb542a725b5dc 100644 --- a/deps/npm/docs/content/commands/npm-ci.md +++ b/deps/npm/docs/content/commands/npm-ci.md @@ -48,8 +48,9 @@ In short, the main differences between using `npm install` and `npm ci` are: NOTE: If you create your `package-lock.json` file by running `npm install` with flags that can affect the shape of your dependency tree, such as -`--legacy-peer-deps`, you _must_ provide the same flags to `npm ci` or you -are likely to encounter errors. 
An easy way to do this is to run +`--legacy-peer-deps` or `--install-links`, you _must_ provide the same +flags to `npm ci` or you are likely to encounter errors. An easy way to do +this is to run, for example, `npm config set legacy-peer-deps=true --location=project` and commit the `.npmrc` file to your repo. diff --git a/deps/npm/docs/content/commands/npm-dedupe.md b/deps/npm/docs/content/commands/npm-dedupe.md index b9768c25db88d6..f816b99433581f 100644 --- a/deps/npm/docs/content/commands/npm-dedupe.md +++ b/deps/npm/docs/content/commands/npm-dedupe.md @@ -310,6 +310,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-find-dupes.md b/deps/npm/docs/content/commands/npm-find-dupes.md index 3549be47daae9c..a92c57bd7e183b 100644 --- a/deps/npm/docs/content/commands/npm-find-dupes.md +++ b/deps/npm/docs/content/commands/npm-find-dupes.md @@ -234,6 +234,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-install-test.md b/deps/npm/docs/content/commands/npm-install-test.md index 8975fc4ce61dec..931ff050718e10 100644 --- a/deps/npm/docs/content/commands/npm-install-test.md +++ b/deps/npm/docs/content/commands/npm-install-test.md @@ -319,6 +319,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md index 259ac41eaf05df..c0a1272c5503d1 100644 --- a/deps/npm/docs/content/commands/npm-install.md +++ b/deps/npm/docs/content/commands/npm-install.md @@ -91,12 +91,12 @@ into a tarball (b). *npm will not install the package dependencies* in the directory ``, but it will create a symlink to ``. - > NOTE: If you want to install the content of a directory like a package from the registry instead of creating a link, you would need to use [`npm pack`](/commands/npm-pack) while in the `` directory, and then install the resulting tarball instead of the `` using `npm install ` + > NOTE: If you want to install the content of a directory like a package from the registry instead of creating a link, you would need to use the `--install-links` option. Example: ```bash - npm install ../../other-package + npm install ../../other-package --install-links npm install ./sub-package ``` @@ -709,6 +709,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. 
This option has no effect on workspaces. + + + + ### Algorithm diff --git a/deps/npm/docs/content/commands/npm-link.md b/deps/npm/docs/content/commands/npm-link.md index fb7e46de04a090..fb2b23921b0445 100644 --- a/deps/npm/docs/content/commands/npm-link.md +++ b/deps/npm/docs/content/commands/npm-link.md @@ -387,6 +387,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index 8d4799777e20f0..ded8c0c0d26ef1 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -285,6 +285,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-prune.md b/deps/npm/docs/content/commands/npm-prune.md index 81dccf889ce4d9..28f02f6add1908 100644 --- a/deps/npm/docs/content/commands/npm-prune.md +++ b/deps/npm/docs/content/commands/npm-prune.md @@ -191,6 +191,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-rebuild.md b/deps/npm/docs/content/commands/npm-rebuild.md index bddd18c2bcaf43..52c368c8c513b9 100644 --- a/deps/npm/docs/content/commands/npm-rebuild.md +++ b/deps/npm/docs/content/commands/npm-rebuild.md @@ -162,6 +162,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-uninstall.md b/deps/npm/docs/content/commands/npm-uninstall.md index 572d9dd8aaa3d1..e39c7e328b20ad 100644 --- a/deps/npm/docs/content/commands/npm-uninstall.md +++ b/deps/npm/docs/content/commands/npm-uninstall.md @@ -145,6 +145,18 @@ This value is not exported to the environment for child processes. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/commands/npm-update.md b/deps/npm/docs/content/commands/npm-update.md index be0b0cb937eae2..394773214995cd 100644 --- a/deps/npm/docs/content/commands/npm-update.md +++ b/deps/npm/docs/content/commands/npm-update.md @@ -437,6 +437,18 @@ This value is not exported to the environment for child processes. 
+#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + ### See Also diff --git a/deps/npm/docs/content/using-npm/config.md b/deps/npm/docs/content/using-npm/config.md index 71dab98a3831e9..ba79dd505a88ed 100644 --- a/deps/npm/docs/content/using-npm/config.md +++ b/deps/npm/docs/content/using-npm/config.md @@ -138,6 +138,8 @@ npm ls --global --parseable --long --loglevel info * Type: null or String A basic-auth string to use when authenticating against the npm registry. +This will ONLY be used to authenticate against the npm registry. For other +registries you will need to scope it like "//other-registry.tld/:_auth" Warning: This should generally not be set via a command-line option. It is safer to use a registry-provided authentication bearer token stored in the @@ -891,6 +893,18 @@ number, if not already set in package.json. +#### `install-links` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. + + + + #### `json` * Default: false diff --git a/deps/npm/docs/content/using-npm/scope.md b/deps/npm/docs/content/using-npm/scope.md index 911d7ea5177c94..ca5903e78c0d95 100644 --- a/deps/npm/docs/content/using-npm/scope.md +++ b/deps/npm/docs/content/using-npm/scope.md @@ -79,9 +79,23 @@ If you wish, you may associate a scope with a registry; see below. #### Publishing public scoped packages to the primary npm registry -To publish a public scoped package, you must specify `--access public` with -the initial publication. This will publish the package and set access -to `public` as if you had run `npm access public` after publishing. +Publishing to a scope, you have two options: + +- Publishing to your user scope (example: `@username/module`) +- Publishing to an organization scope (example: `@org/module`) + +If publishing a public module to an organization scope, you must +first either create an organization with the name of the scope +that you'd like to publish to or be added to an existing organization +with the appropriate permisssions. For example, if you'd like to +publish to `@org`, you would need to create the `org` organization +on npmjs.com prior to trying to publish. + +Scoped packages are not public by default. You will need to specify +`--access public` with the initial `npm publish` command. This will publish +the package and set access to `public` as if you had run `npm access public` +after publishing. You do not need to do this when publishing new versions of +an existing scoped package. #### Publishing private scoped packages to the npm registry diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html index e9c7021a4256ff..af627aff7701a1 100644 --- a/deps/npm/docs/output/commands/npm-audit.html +++ b/deps/npm/docs/output/commands/npm-audit.html @@ -142,7 +142,7 @@

npm-audit

Table of contents

- +

Synopsis

@@ -442,6 +442,16 @@

include-workspace-root

This value is not exported to the environment for child processes.

+ +
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

+ +

See Also

    diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html index 8c57f50851462b..713f7b0c81e4f7 100644 --- a/deps/npm/docs/output/commands/npm-ci.html +++ b/deps/npm/docs/output/commands/npm-ci.html @@ -181,8 +181,9 @@

    Description

NOTE: If you create your package-lock.json file by running npm install with flags that can affect the shape of your dependency tree, such as ---legacy-peer-deps, you must provide the same flags to npm ci or you -are likely to encounter errors. An easy way to do this is to run +--legacy-peer-deps or --install-links, you must provide the same +flags to npm ci or you are likely to encounter errors. An easy way to do +this is to run, for example, npm config set legacy-peer-deps=true --location=project and commit the .npmrc file to your repo.

Example

diff --git a/deps/npm/docs/output/commands/npm-dedupe.html b/deps/npm/docs/output/commands/npm-dedupe.html index d364a10b2ce289..5bc84af0a26ac7 100644 --- a/deps/npm/docs/output/commands/npm-dedupe.html +++ b/deps/npm/docs/output/commands/npm-dedupe.html @@ -142,7 +142,7 @@

npm-dedupe

Table of contents

- +

Synopsis

@@ -383,6 +383,16 @@

include-workspace-root

This value is not exported to the environment for child processes.

+ +
    +
  • Default: false
  • +
  • Type: Boolean
  • +
+

When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

+ +

See Also

    diff --git a/deps/npm/docs/output/commands/npm-find-dupes.html b/deps/npm/docs/output/commands/npm-find-dupes.html index 02decff2e9158f..9e57bd78dffb5d 100644 --- a/deps/npm/docs/output/commands/npm-find-dupes.html +++ b/deps/npm/docs/output/commands/npm-find-dupes.html @@ -142,7 +142,7 @@

    npm-find-dupes

    Table of contents

    - +

    Synopsis

    @@ -327,6 +327,16 @@

    include-workspace-root

    This value is not exported to the environment for child processes.

    + +
      +
    • Default: false
    • +
    • Type: Boolean
    • +
    +

    When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

    + +

    See Also

      diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html index 507003bcae022e..9e3655c49f7c5f 100644 --- a/deps/npm/docs/output/commands/npm-install-test.html +++ b/deps/npm/docs/output/commands/npm-install-test.html @@ -142,7 +142,7 @@

      npm-install-test

      Table of contents

      - +

      Synopsis

      @@ -399,6 +399,16 @@

      include-workspace-root

      This value is not exported to the environment for child processes.

      + +
        +
      • Default: false
      • +
      • Type: Boolean
      • +
      +

      When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

      + +

      See Also

        diff --git a/deps/npm/docs/output/commands/npm-install.html b/deps/npm/docs/output/commands/npm-install.html index de101873d98259..de949e9493afdb 100644 --- a/deps/npm/docs/output/commands/npm-install.html +++ b/deps/npm/docs/output/commands/npm-install.html @@ -142,7 +142,7 @@

        npm-install

        Table of contents

        - +

        Synopsis

        @@ -221,10 +221,10 @@

        Description

        npm will not install the package dependencies in the directory <folder>, but it will create a symlink to <folder>.

        -

        NOTE: If you want to install the content of a directory like a package from the registry instead of creating a link, you would need to use npm pack while in the <folder> directory, and then install the resulting tarball instead of the <folder> using npm install <tarball file>

        +

        NOTE: If you want to install the content of a directory like a package from the registry instead of creating a link, you would need to use the --install-links option.

        Example:

        -
        npm install ../../other-package
        +
        npm install ../../other-package --install-links
         npm install ./sub-package
         
        @@ -725,6 +725,16 @@

        include-workspace-root

        This value is not exported to the environment for child processes.

        + +
          +
        • Default: false
        • +
        • Type: Boolean
        • +
        +

        When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

        + +

        Algorithm

        Given a package{dep} structure: A{B,C}, B{C}, C{D}, diff --git a/deps/npm/docs/output/commands/npm-link.html b/deps/npm/docs/output/commands/npm-link.html index daf2be56f2971d..33a2f33b69c8c5 100644 --- a/deps/npm/docs/output/commands/npm-link.html +++ b/deps/npm/docs/output/commands/npm-link.html @@ -142,7 +142,7 @@

        npm-link

        Table of contents

        - +

        Synopsis

        @@ -442,6 +442,16 @@

        include-workspace-root

        This value is not exported to the environment for child processes.

        + +
          +
        • Default: false
        • +
        • Type: Boolean
        • +
        +

        When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

        + +

        See Also

          diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index 51524a4f1304c3..707324960a84bc 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -142,7 +142,7 @@

          npm-ls

          Table of contents

          - +

          Synopsis

          @@ -166,7 +166,7 @@

          Description

          the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

          -
          npm@8.7.0 /path/to/npm
          +
          npm@8.8.0 /path/to/npm
           └─┬ init-package-json@0.0.4
             └── promzard@0.1.5
           
          @@ -366,6 +366,16 @@

          include-workspace-root

          This value is not exported to the environment for child processes.

          + +
            +
          • Default: false
          • +
          • Type: Boolean
          • +
          +

          When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

          + +

          See Also

            diff --git a/deps/npm/docs/output/commands/npm-prune.html b/deps/npm/docs/output/commands/npm-prune.html index 51f7798ce348d2..0a248629bd8291 100644 --- a/deps/npm/docs/output/commands/npm-prune.html +++ b/deps/npm/docs/output/commands/npm-prune.html @@ -142,7 +142,7 @@

            npm-prune

            Table of contents

            - +

            Synopsis

            @@ -291,6 +291,16 @@

            include-workspace-root

            This value is not exported to the environment for child processes.

            + +
              +
            • Default: false
            • +
            • Type: Boolean
            • +
            +

            When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

            + +

            See Also

              diff --git a/deps/npm/docs/output/commands/npm-rebuild.html b/deps/npm/docs/output/commands/npm-rebuild.html index 1f0acc8d6a7102..f4524f1a4f0baa 100644 --- a/deps/npm/docs/output/commands/npm-rebuild.html +++ b/deps/npm/docs/output/commands/npm-rebuild.html @@ -142,7 +142,7 @@

              npm-rebuild

              Table of contents

              - +

              Synopsis

              @@ -272,6 +272,16 @@

              include-workspace-root

              This value is not exported to the environment for child processes.

              + +
                +
              • Default: false
              • +
              • Type: Boolean
              • +
              +

              When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

              + +

              See Also

                diff --git a/deps/npm/docs/output/commands/npm-uninstall.html b/deps/npm/docs/output/commands/npm-uninstall.html index 3128b7e9bb99c9..9823201dbc812d 100644 --- a/deps/npm/docs/output/commands/npm-uninstall.html +++ b/deps/npm/docs/output/commands/npm-uninstall.html @@ -142,7 +142,7 @@

                npm-uninstall

                Table of contents

                - +

                Synopsis

                @@ -249,6 +249,16 @@

                include-workspace-root

                This value is not exported to the environment for child processes.

                + +
                  +
                • Default: false
                • +
                • Type: Boolean
                • +
                +

                When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

                + +

                See Also

                  diff --git a/deps/npm/docs/output/commands/npm-update.html b/deps/npm/docs/output/commands/npm-update.html index e1c38072b4a756..2ada2dcb7cc9ca 100644 --- a/deps/npm/docs/output/commands/npm-update.html +++ b/deps/npm/docs/output/commands/npm-update.html @@ -142,7 +142,7 @@

                  npm-update

                  Table of contents

                  - +

                  Synopsis

                  @@ -478,6 +478,16 @@

                  include-workspace-root

                  This value is not exported to the environment for child processes.

                  + +
                    +
                  • Default: false
                  • +
                  • Type: Boolean
                  • +
                  +

                  When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

                  + +

                  See Also

                  -

                  A basic-auth string to use when authenticating against the npm registry.

                  +

                  A basic-auth string to use when authenticating against the npm registry. +This will ONLY be used to authenticate against the npm registry. For other +registries you will need to scope it like "//other-registry.tld/:_auth"

                  Warning: This should generally not be set via a command-line option. It is safer to use a registry-provided authentication bearer token stored in the ~/.npmrc file by running npm login.

                  @@ -862,6 +864,16 @@

                  init-version

                  number, if not already set in package.json.

                  + +
                    +
                  • Default: false
                  • +
                  • Type: Boolean
                  • +
                  +

                  When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces.

                  + +

                  json

                  • Default: false
                  • diff --git a/deps/npm/docs/output/using-npm/scope.html b/deps/npm/docs/output/using-npm/scope.html index db6fc2a115f496..61ab698b5fbc95 100644 --- a/deps/npm/docs/output/using-npm/scope.html +++ b/deps/npm/docs/output/using-npm/scope.html @@ -194,9 +194,22 @@

                    Publishing scoped packages

                    does support scoped packages.)

                    If you wish, you may associate a scope with a registry; see below.

                    Publishing public scoped packages to the primary npm registry

                    -

                    To publish a public scoped package, you must specify --access public with -the initial publication. This will publish the package and set access -to public as if you had run npm access public after publishing.

                    +

                    Publishing to a scope, you have two options:

                    +
                      +
                    • Publishing to your user scope (example: @username/module)
                    • +
                    • Publishing to an organization scope (example: @org/module)
                    • +
                    +

If publishing a public module to an organization scope, you must +first either create an organization with the name of the scope +that you'd like to publish to or be added to an existing organization +with the appropriate permissions. For example, if you'd like to +publish to @org, you would need to create the org organization +on npmjs.com prior to trying to publish.

                    +

                    Scoped packages are not public by default. You will need to specify +--access public with the initial npm publish command. This will publish +the package and set access to public as if you had run npm access public +after publishing. You do not need to do this when publishing new versions of +an existing scoped package.

                    Publishing private scoped packages to the npm registry

                    To publish a private scoped package to the npm registry, you must have an npm Private Modules diff --git a/deps/npm/lib/arborist-cmd.js b/deps/npm/lib/arborist-cmd.js index 6518e91e0ad9d7..5007fbd9244d24 100644 --- a/deps/npm/lib/arborist-cmd.js +++ b/deps/npm/lib/arborist-cmd.js @@ -12,6 +12,7 @@ class ArboristCmd extends BaseCommand { 'workspace', 'workspaces', 'include-workspace-root', + 'install-links', ] static ignoreImplicitWorkspace = false diff --git a/deps/npm/lib/commands/deprecate.js b/deps/npm/lib/commands/deprecate.js index 88eb320c32a523..0ae88f1921f566 100644 --- a/deps/npm/lib/commands/deprecate.js +++ b/deps/npm/lib/commands/deprecate.js @@ -26,7 +26,7 @@ class Deprecate extends BaseCommand { const packages = await libaccess.lsPackages(username, this.npm.flatOptions) return Object.keys(packages) .filter((name) => - packages[name] === 'write' && + packages[name] === 'read-write' && (opts.conf.argv.remain.length === 0 || name.startsWith(opts.conf.argv.remain[0]))) } diff --git a/deps/npm/lib/commands/diff.js b/deps/npm/lib/commands/diff.js index ff942cc44e9460..11ee78265e62a3 100644 --- a/deps/npm/lib/commands/diff.js +++ b/deps/npm/lib/commands/diff.js @@ -6,7 +6,7 @@ const Arborist = require('@npmcli/arborist') const pacote = require('pacote') const pickManifest = require('npm-pick-manifest') const log = require('../utils/log-shim') -const readPackageName = require('../utils/read-package-name.js') +const readPackage = require('read-package-json-fast') const BaseCommand = require('../base-command.js') class Diff extends BaseCommand { @@ -81,7 +81,8 @@ class Diff extends BaseCommand { async packageName (path) { let name try { - name = await readPackageName(this.prefix) + const pkg = await readPackage(resolve(this.prefix, 'package.json')) + name = pkg.name } catch (e) { log.verbose('diff', 'could not read project dir package.json') } @@ -114,7 +115,8 @@ class Diff extends BaseCommand { let noPackageJson let pkgName try { - pkgName = await readPackageName(this.prefix) + const pkg = await readPackage(resolve(this.prefix, 'package.json')) + pkgName = pkg.name } catch (e) { log.verbose('diff', 'could not read project dir package.json') noPackageJson = true @@ -225,7 +227,8 @@ class Diff extends BaseCommand { if (semverA && semverB) { let pkgName try { - pkgName = await readPackageName(this.prefix) + const pkg = await readPackage(resolve(this.prefix, 'package.json')) + pkgName = pkg.name } catch (e) { log.verbose('diff', 'could not read project dir package.json') } diff --git a/deps/npm/lib/commands/dist-tag.js b/deps/npm/lib/commands/dist-tag.js index 3b82c5194cca82..42cad80df0073f 100644 --- a/deps/npm/lib/commands/dist-tag.js +++ b/deps/npm/lib/commands/dist-tag.js @@ -1,9 +1,10 @@ const npa = require('npm-package-arg') +const path = require('path') const regFetch = require('npm-registry-fetch') const semver = require('semver') const log = require('../utils/log-shim') const otplease = require('../utils/otplease.js') -const readPackageName = require('../utils/read-package-name.js') +const readPackage = require('read-package-json-fast') const BaseCommand = require('../base-command.js') class DistTag extends BaseCommand { @@ -150,12 +151,12 @@ class DistTag extends BaseCommand { if (this.npm.config.get('global')) { throw this.usageError() } - const pkg = await readPackageName(this.npm.prefix) - if (!pkg) { + const { name } = await readPackage(path.resolve(this.npm.prefix, 'package.json')) + if (!name) { throw this.usageError() } - return this.list(pkg, opts) + 
return this.list(name, opts) } spec = npa(spec) diff --git a/deps/npm/lib/commands/exec.js b/deps/npm/lib/commands/exec.js index 5e6a94296d2872..f764cea528adba 100644 --- a/deps/npm/lib/commands/exec.js +++ b/deps/npm/lib/commands/exec.js @@ -48,10 +48,8 @@ class Exec extends BaseCommand { static ignoreImplicitWorkspace = false static isShellout = true - async exec (_args, { locationMsg, path, runPath } = {}) { - if (!path) { - path = this.npm.localPrefix - } + async exec (_args, { locationMsg, runPath } = {}) { + const path = this.npm.localPrefix if (!runPath) { runPath = process.cwd() @@ -95,7 +93,7 @@ class Exec extends BaseCommand { for (const path of this.workspacePaths) { const locationMsg = await getLocationMsg({ color, path }) - await this.exec(args, { locationMsg, path, runPath: path }) + await this.exec(args, { locationMsg, runPath: path }) } } } diff --git a/deps/npm/lib/commands/help-search.js b/deps/npm/lib/commands/help-search.js index 23b426eaa016db..488189bbbc5cd8 100644 --- a/deps/npm/lib/commands/help-search.js +++ b/deps/npm/lib/commands/help-search.js @@ -6,6 +6,8 @@ const glob = promisify(require('glob')) const readFile = promisify(fs.readFile) const BaseCommand = require('../base-command.js') +const globify = pattern => pattern.split('\\').join('/') + class HelpSearch extends BaseCommand { static description = 'Search npm help documentation' static name = 'help-search' @@ -19,7 +21,7 @@ class HelpSearch extends BaseCommand { } const docPath = path.resolve(__dirname, '..', '..', 'docs/content') - const files = await glob(`${docPath}/*/*.md`) + const files = await glob(`${globify(docPath)}/*/*.md`) const data = await this.readFiles(files) const results = await this.searchFiles(args, data, files) const formatted = this.formatResults(args, results) diff --git a/deps/npm/lib/commands/help.js b/deps/npm/lib/commands/help.js index d31b3ca697651d..e7d6395a1b01a6 100644 --- a/deps/npm/lib/commands/help.js +++ b/deps/npm/lib/commands/help.js @@ -5,6 +5,7 @@ const { promisify } = require('util') const glob = promisify(require('glob')) const localeCompare = require('@isaacs/string-locale-compare')('en') +const globify = pattern => pattern.split('\\').join('/') const BaseCommand = require('../base-command.js') // Strips out the number from foo.7 or foo.7. or foo.7.tgz @@ -26,7 +27,7 @@ class Help extends BaseCommand { return [] } const g = path.resolve(__dirname, '../../man/man[0-9]/*.[0-9]') - const files = await glob(g) + const files = await glob(globify(g)) return Object.keys(files.reduce(function (acc, file) { file = path.basename(file).replace(/\.[0-9]+$/, '') @@ -61,7 +62,7 @@ class Help extends BaseCommand { const manroot = path.resolve(__dirname, '..', '..', 'man') // find either section.n or npm-section.n const f = `${manroot}/${manSearch}/?(npm-)${section}.[0-9]*` - let mans = await glob(f) + let mans = await glob(globify(f)) mans = mans.sort((a, b) => { // Prefer the page with an npm prefix, if there's only one. 
const aHasPrefix = manNpmPrefixRegex.test(a) diff --git a/deps/npm/lib/commands/install.js b/deps/npm/lib/commands/install.js index 0a5c827bcc97b5..d1f6d1481dddc5 100644 --- a/deps/npm/lib/commands/install.js +++ b/deps/npm/lib/commands/install.js @@ -139,6 +139,12 @@ class Install extends ArboristWorkspaceCmd { args = ['.'] } + // throw usage error if trying to install empty package + // name to global space, e.g: `npm i -g ""` + if (where === globalTop && !args.every(Boolean)) { + throw this.usageError() + } + const opts = { ...this.npm.flatOptions, auditLevel: null, diff --git a/deps/npm/lib/commands/owner.js b/deps/npm/lib/commands/owner.js index 07f71c5974768f..285b06be8e5fe1 100644 --- a/deps/npm/lib/commands/owner.js +++ b/deps/npm/lib/commands/owner.js @@ -3,8 +3,18 @@ const npmFetch = require('npm-registry-fetch') const pacote = require('pacote') const log = require('../utils/log-shim') const otplease = require('../utils/otplease.js') -const readLocalPkgName = require('../utils/read-package-name.js') +const readPackageJsonFast = require('read-package-json-fast') const BaseCommand = require('../base-command.js') +const { resolve } = require('path') + +const readJson = async (pkg) => { + try { + const json = await readPackageJsonFast(pkg) + return json + } catch { + return {} + } +} class Owner extends BaseCommand { static description = 'Manage package owners' @@ -41,12 +51,12 @@ class Owner extends BaseCommand { if (this.npm.config.get('global')) { return [] } - const pkgName = await readLocalPkgName(this.npm.prefix) - if (!pkgName) { + const { name } = await readJson(resolve(this.npm.prefix, 'package.json')) + if (!name) { return [] } - const spec = npa(pkgName) + const spec = npa(name) const data = await pacote.packument(spec, { ...this.npm.flatOptions, fullMetadata: true, @@ -96,12 +106,12 @@ class Owner extends BaseCommand { if (this.npm.config.get('global')) { throw this.usageError() } - const pkgName = await readLocalPkgName(this.npm.prefix) - if (!pkgName) { + const { name } = await readJson(resolve(this.npm.prefix, 'package.json')) + if (!name) { throw this.usageError() } - return pkgName + return name } return pkg } @@ -125,15 +135,6 @@ class Owner extends BaseCommand { throw err } - if (!u || !u.name || u.error) { - throw Object.assign( - new Error( - "Couldn't get user data for " + user + ': ' + JSON.stringify(u) - ), - { code: 'EOWNERUSER' } - ) - } - // normalize user data u = { name: u.name, email: u.email } @@ -177,32 +178,31 @@ class Owner extends BaseCommand { } const dataPath = `/${spec.escapedName}/-rev/${encodeURIComponent(data._rev)}` - const res = await otplease(this.npm.flatOptions, opts => { - return npmFetch.json(dataPath, { - ...opts, - method: 'PUT', - body: { - _id: data._id, - _rev: data._rev, - maintainers, - }, - spec, + try { + const res = await otplease(this.npm.flatOptions, opts => { + return npmFetch.json(dataPath, { + ...opts, + method: 'PUT', + body: { + _id: data._id, + _rev: data._rev, + maintainers, + }, + spec, + }) }) - }) - - if (!res.error) { if (addOrRm === 'add') { this.npm.output(`+ ${user} (${spec.name})`) } else { this.npm.output(`- ${user} (${spec.name})`) } - } else { + return res + } catch (err) { throw Object.assign( - new Error('Failed to update package: ' + JSON.stringify(res)), + new Error('Failed to update package: ' + JSON.stringify(err.message)), { code: 'EOWNERMUTATE' } ) } - return res } } diff --git a/deps/npm/lib/commands/publish.js b/deps/npm/lib/commands/publish.js index 51861c5aa35547..ff303669387868 100644 --- 
a/deps/npm/lib/commands/publish.js +++ b/deps/npm/lib/commands/publish.js @@ -69,10 +69,6 @@ class Publish extends BaseCommand { const spec = npa(args[0]) let manifest = await this.getManifest(spec, opts) - if (manifest.publishConfig) { - flatten(manifest.publishConfig, opts) - } - // only run scripts for directory type publishes if (spec.type === 'directory' && !ignoreScripts) { await runScript({ @@ -92,12 +88,8 @@ class Publish extends BaseCommand { // so that we send the latest and greatest thing to the registry // note that publishConfig might have changed as well! manifest = await this.getManifest(spec, opts) - if (manifest.publishConfig) { - flatten(manifest.publishConfig, opts) - } - // note that logTar calls log.notice(), so if we ARE in silent mode, - // this will do nothing, but we still want it in the debuglog if it fails. + // JSON already has the package contents if (!json) { logTar(pkgContents, { unicode }) } @@ -197,15 +189,22 @@ class Publish extends BaseCommand { // if it's a directory, read it from the file system // otherwise, get the full metadata from whatever it is - getManifest (spec, opts) { + // XXX can't pacote read the manifest from a directory? + async getManifest (spec, opts) { + let manifest if (spec.type === 'directory') { - return readJson(`${spec.fetchSpec}/package.json`) + manifest = await readJson(`${spec.fetchSpec}/package.json`) + } else { + manifest = await pacote.manifest(spec, { + ...opts, + fullmetadata: true, + fullReadJson: true, + }) + } + if (manifest.publishConfig) { + flatten(manifest.publishConfig, opts) } - return pacote.manifest(spec, { - ...opts, - fullMetadata: true, - fullReadJson: true, - }) + return manifest } } module.exports = Publish diff --git a/deps/npm/lib/utils/config/definitions.js b/deps/npm/lib/utils/config/definitions.js index 7ff0eeb1283c5c..4a1f971d85436c 100644 --- a/deps/npm/lib/utils/config/definitions.js +++ b/deps/npm/lib/utils/config/definitions.js @@ -147,6 +147,8 @@ define('_auth', { type: [null, String], description: ` A basic-auth string to use when authenticating against the npm registry. + This will ONLY be used to authenticate against the npm registry. For other + registries you will need to scope it like "//other-registry.tld/:_auth" Warning: This should generally not be set via a command-line option. It is safer to use a registry-provided authentication bearer token stored in @@ -1070,6 +1072,17 @@ define('init.version', { `, }) +define('install-links', { + default: false, + type: Boolean, + description: ` + When set file: protocol dependencies that exist outside of the project root + will be packed and installed as regular dependencies instead of creating a + symlink. This option has no effect on workspaces. 
+ `, + flatten, +}) + define('json', { default: false, type: Boolean, diff --git a/deps/npm/lib/utils/log-file.js b/deps/npm/lib/utils/log-file.js index 282c72700e58e2..9cf6513bedf484 100644 --- a/deps/npm/lib/utils/log-file.js +++ b/deps/npm/lib/utils/log-file.js @@ -9,6 +9,7 @@ const fs = require('@npmcli/fs') const log = require('./log-shim') const padZero = (n, length) => n.toString().padStart(length.toString().length, '0') +const globify = pattern => pattern.split('\\').join('/') const _logHandler = Symbol('logHandler') const _formatLogItem = Symbol('formatLogItem') @@ -225,7 +226,7 @@ class LogFiles { ) // Always ignore the currently written files - const files = await glob(logGlob, { ignore: this.#files }) + const files = await glob(globify(logGlob), { ignore: this.#files.map(globify) }) const toDelete = files.length - this.#logsMax if (toDelete <= 0) { @@ -236,7 +237,7 @@ class LogFiles { for (const file of files.slice(0, toDelete)) { try { - await rimraf(file) + await rimraf(file, { glob: false }) } catch (e) { log.silly('logfile', 'error removing log file', file, e) } diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index f16e4d847bcc43..5954f069d9a9f9 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -423,6 +423,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index eac2197c2ad3f8..0cfdc6cf07452d 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -48,8 +48,9 @@ installs are essentially frozen\. .P NOTE: If you create your \fBpackage\-lock\.json\fP file by running \fBnpm install\fP with flags that can affect the shape of your dependency tree, such as -\fB\-\-legacy\-peer\-deps\fP, you \fImust\fR provide the same flags to \fBnpm ci\fP or you -are likely to encounter errors\. An easy way to do this is to run +\fB\-\-legacy\-peer\-deps\fP or \fB\-\-install\-links\fP, you \fImust\fR provide the same +flags to \fBnpm ci\fP or you are likely to encounter errors\. An easy way to do +this is to run, for example, \fBnpm config set legacy\-peer\-deps=true \-\-location=project\fP and commit the \fB\|\.npmrc\fP file to your repo\. .SS Example diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index 4a49a555a2314a..71c319f04a27d0 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -308,6 +308,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. 
.SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index d52a9aa5260342..d89bd9f390829e 100644 --- a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -226,6 +226,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index 7479f9371ea981..b28c8300e19674 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -316,6 +316,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index cc83443f5447b8..1f3bf51ada1feb 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -91,14 +91,14 @@ NOTE: The \fB\-\-production\fP flag has no particular meaning when adding a \fInpm will not install the package dependencies\fR in the directory \fB\fP, but it will create a symlink to \fB\fP\|\. .QP -NOTE: If you want to install the content of a directory like a package from the registry instead of creating a link, you would need to use npm help \fBpack\fP while in the \fB\fP directory, and then install the resulting tarball instead of the \fB\fP using \fBnpm install \fP +NOTE: If you want to install the content of a directory like a package from the registry instead of creating a link, you would need to use the \fB\-\-install\-links\fP option\. . Example: .P .RS 2 .nf - npm install \.\./\.\./other\-package + npm install \.\./\.\./other\-package \-\-install\-links npm install \./sub\-package .fi .RE @@ -727,6 +727,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. 
.SS Algorithm .P Given a \fBpackage{dep}\fP structure: \fBA{B,C}, B{C}, C{D}\fP, diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index 5911d4751b7f6b..d726e669dd2411 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -390,6 +390,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 1a55f04bc023d8..6485cb3650decf 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf -npm@8\.7\.0 /path/to/npm +npm@8\.8\.0 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi @@ -286,6 +286,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index 81850d8a836278..a3d43f6455fc5c 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -186,6 +186,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index 0ff22d856bbb68..bd5a47263f0e2a 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -159,6 +159,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. 
.SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index 2079d56b8a5cf2..387a037ca43183 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -140,6 +140,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index 073f791ac089b0..e681d6a5543169 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -440,6 +440,18 @@ all workspaces via the \fBworkspaces\fP flag, will cause npm to operate only on the specified workspaces, and not on the root project\. .P This value is not exported to the environment for child processes\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS See Also .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index c6adaa2b527d80..3147db3ad76ee1 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -4,7 +4,7 @@ .SS Synopsis .SS Version .P -8\.7\.0 +8\.8\.0 .SS Description .P npm is the package manager for the Node JavaScript platform\. It puts diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index ee680d515c9a24..3bb501bcdae6f4 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -173,6 +173,8 @@ Type: null or String .RE .P A basic\-auth string to use when authenticating against the npm registry\. +This will ONLY be used to authenticate against the npm registry\. For other +registries you will need to scope it like "//other\-registry\.tld/:_auth" .P Warning: This should generally not be set via a command\-line option\. It is safer to use a registry\-provided authentication bearer token stored in the @@ -947,6 +949,18 @@ Type: SemVer string .P The value that \fBnpm init\fP should use by default for the package version number, if not already set in package\.json\. +.SS \fBinstall\-links\fP +.RS 0 +.IP \(bu 2 +Default: false +.IP \(bu 2 +Type: Boolean + +.RE +.P +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink\. This option has no effect on workspaces\. .SS \fBjson\fP .RS 0 .IP \(bu 2 diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7 index 43faee6693540f..0fd24d0fcb6ac1 100644 --- a/deps/npm/man/man7/scope.7 +++ b/deps/npm/man/man7/scope.7 @@ -80,9 +80,27 @@ registry\. If you wish, you may associate a scope with a registry; see below\. .SS Publishing public scoped packages to the primary npm registry .P -To publish a public scoped package, you must specify \fB\-\-access public\fP with -the initial publication\. 
This will publish the package and set access -to \fBpublic\fP as if you had run \fBnpm access public\fP after publishing\. +Publishing to a scope, you have two options: +.RS 0 +.IP \(bu 2 +Publishing to your user scope (example: \fB@username/module\fP) +.IP \(bu 2 +Publishing to an organization scope (example: \fB@org/module\fP) + +.RE +.P +If publishing a public module to an organization scope, you must +first either create an organization with the name of the scope +that you'd like to publish to or be added to an existing organization +with the appropriate permisssions\. For example, if you'd like to +publish to \fB@org\fP, you would need to create the \fBorg\fP organization +on npmjs\.com prior to trying to publish\. +.P +Scoped packages are not public by default\. You will need to specify +\fB\-\-access public\fP with the initial \fBnpm publish\fP command\. This will publish +the package and set access to \fBpublic\fP as if you had run \fBnpm access public\fP +after publishing\. You do not need to do this when publishing new versions of +an existing scoped package\. .SS Publishing private scoped packages to the npm registry .P To publish a private scoped package to the npm registry, you must have diff --git a/deps/npm/node_modules/colors/LICENSE b/deps/npm/node_modules/@colors/colors/LICENSE similarity index 96% rename from deps/npm/node_modules/colors/LICENSE rename to deps/npm/node_modules/@colors/colors/LICENSE index 17880ff02972b2..6b86056199d2ac 100644 --- a/deps/npm/node_modules/colors/LICENSE +++ b/deps/npm/node_modules/@colors/colors/LICENSE @@ -5,6 +5,7 @@ Original Library Additional Functionality - Copyright (c) Sindre Sorhus (sindresorhus.com) + - Copyright (c) DABH (https://github.com/DABH) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/deps/npm/node_modules/colors/examples/normal-usage.js b/deps/npm/node_modules/@colors/colors/examples/normal-usage.js similarity index 98% rename from deps/npm/node_modules/colors/examples/normal-usage.js rename to deps/npm/node_modules/@colors/colors/examples/normal-usage.js index 822db1cc81ba44..c4515653e288d7 100644 --- a/deps/npm/node_modules/colors/examples/normal-usage.js +++ b/deps/npm/node_modules/@colors/colors/examples/normal-usage.js @@ -29,6 +29,7 @@ console.log('Background color attack!'.black.bgWhite); console.log('Use random styles on everything!'.random); console.log('America, Heck Yeah!'.america); +// eslint-disable-next-line max-len console.log('Blindingly '.brightCyan + 'bright? 
'.brightRed + 'Why '.brightYellow + 'not?!'.brightGreen); console.log('Setting themes is useful'); @@ -79,4 +80,3 @@ console.log('this is a warning'.warn); console.log('this is an input'.input); // console.log("Don't summon".zalgo) - diff --git a/deps/npm/node_modules/colors/examples/safe-string.js b/deps/npm/node_modules/@colors/colors/examples/safe-string.js similarity index 98% rename from deps/npm/node_modules/colors/examples/safe-string.js rename to deps/npm/node_modules/@colors/colors/examples/safe-string.js index 5bc0168e33ed02..ed5f4ca468e10f 100644 --- a/deps/npm/node_modules/colors/examples/safe-string.js +++ b/deps/npm/node_modules/@colors/colors/examples/safe-string.js @@ -28,6 +28,7 @@ console.log(colors.black.bgWhite('Background color attack!')); console.log(colors.random('Use random styles on everything!')); console.log(colors.america('America, Heck Yeah!')); +// eslint-disable-next-line max-len console.log(colors.brightCyan('Blindingly ') + colors.brightRed('bright? ') + colors.brightYellow('Why ') + colors.brightGreen('not?!')); console.log('Setting themes is useful'); @@ -75,5 +76,3 @@ console.log(colors.warn('this is a warning')); console.log(colors.input('this is an input')); // console.log(colors.zalgo("Don't summon him")) - - diff --git a/deps/npm/node_modules/colors/index.d.ts b/deps/npm/node_modules/@colors/colors/index.d.ts similarity index 96% rename from deps/npm/node_modules/colors/index.d.ts rename to deps/npm/node_modules/@colors/colors/index.d.ts index baa70686535a78..df3f2e6afc945a 100644 --- a/deps/npm/node_modules/colors/index.d.ts +++ b/deps/npm/node_modules/@colors/colors/index.d.ts @@ -1,7 +1,7 @@ -// Type definitions for Colors.js 1.2 +// Type definitions for @colors/colors 1.4+ // Project: https://github.com/Marak/colors.js // Definitions by: Bart van der Schoor , Staffan Eketorp -// Definitions: https://github.com/Marak/colors.js +// Definitions: https://github.com/DABH/colors.js export interface Color { (text: string): string; diff --git a/deps/npm/node_modules/colors/lib/colors.js b/deps/npm/node_modules/@colors/colors/lib/colors.js similarity index 99% rename from deps/npm/node_modules/colors/lib/colors.js rename to deps/npm/node_modules/@colors/colors/lib/colors.js index 9c7f1d14163133..d9fb08762fde51 100644 --- a/deps/npm/node_modules/colors/lib/colors.js +++ b/deps/npm/node_modules/@colors/colors/lib/colors.js @@ -65,7 +65,7 @@ var stylize = colors.stylize = function stylize(str, style) { var styleMap = ansiStyles[style]; // Stylize should work for non-ANSI styles, too - if(!styleMap && style in colors){ + if (!styleMap && style in colors) { // Style maps like trap operate as functions on strings; // they don't have properties like open or close. 
return colors[style](str); diff --git a/deps/npm/node_modules/colors/lib/custom/trap.js b/deps/npm/node_modules/@colors/colors/lib/custom/trap.js similarity index 100% rename from deps/npm/node_modules/colors/lib/custom/trap.js rename to deps/npm/node_modules/@colors/colors/lib/custom/trap.js diff --git a/deps/npm/node_modules/colors/lib/custom/zalgo.js b/deps/npm/node_modules/@colors/colors/lib/custom/zalgo.js similarity index 99% rename from deps/npm/node_modules/colors/lib/custom/zalgo.js rename to deps/npm/node_modules/@colors/colors/lib/custom/zalgo.js index 0ef2b011956358..01bdd2b802f626 100644 --- a/deps/npm/node_modules/colors/lib/custom/zalgo.js +++ b/deps/npm/node_modules/@colors/colors/lib/custom/zalgo.js @@ -107,4 +107,3 @@ module['exports'] = function zalgo(text, options) { // don't summon him return heComes(text, options); }; - diff --git a/deps/npm/node_modules/colors/lib/extendStringPrototype.js b/deps/npm/node_modules/@colors/colors/lib/extendStringPrototype.js similarity index 100% rename from deps/npm/node_modules/colors/lib/extendStringPrototype.js rename to deps/npm/node_modules/@colors/colors/lib/extendStringPrototype.js diff --git a/deps/npm/node_modules/colors/lib/index.js b/deps/npm/node_modules/@colors/colors/lib/index.js similarity index 100% rename from deps/npm/node_modules/colors/lib/index.js rename to deps/npm/node_modules/@colors/colors/lib/index.js diff --git a/deps/npm/node_modules/colors/lib/maps/america.js b/deps/npm/node_modules/@colors/colors/lib/maps/america.js similarity index 100% rename from deps/npm/node_modules/colors/lib/maps/america.js rename to deps/npm/node_modules/@colors/colors/lib/maps/america.js diff --git a/deps/npm/node_modules/colors/lib/maps/rainbow.js b/deps/npm/node_modules/@colors/colors/lib/maps/rainbow.js similarity index 99% rename from deps/npm/node_modules/colors/lib/maps/rainbow.js rename to deps/npm/node_modules/@colors/colors/lib/maps/rainbow.js index 2b00ac0ac998e6..874508da8ed17e 100644 --- a/deps/npm/node_modules/colors/lib/maps/rainbow.js +++ b/deps/npm/node_modules/@colors/colors/lib/maps/rainbow.js @@ -9,4 +9,3 @@ module['exports'] = function(colors) { } }; }; - diff --git a/deps/npm/node_modules/colors/lib/maps/random.js b/deps/npm/node_modules/@colors/colors/lib/maps/random.js similarity index 100% rename from deps/npm/node_modules/colors/lib/maps/random.js rename to deps/npm/node_modules/@colors/colors/lib/maps/random.js diff --git a/deps/npm/node_modules/colors/lib/maps/zebra.js b/deps/npm/node_modules/@colors/colors/lib/maps/zebra.js similarity index 100% rename from deps/npm/node_modules/colors/lib/maps/zebra.js rename to deps/npm/node_modules/@colors/colors/lib/maps/zebra.js diff --git a/deps/npm/node_modules/colors/lib/styles.js b/deps/npm/node_modules/@colors/colors/lib/styles.js similarity index 100% rename from deps/npm/node_modules/colors/lib/styles.js rename to deps/npm/node_modules/@colors/colors/lib/styles.js diff --git a/deps/npm/node_modules/colors/lib/system/has-flag.js b/deps/npm/node_modules/@colors/colors/lib/system/has-flag.js similarity index 100% rename from deps/npm/node_modules/colors/lib/system/has-flag.js rename to deps/npm/node_modules/@colors/colors/lib/system/has-flag.js diff --git a/deps/npm/node_modules/colors/lib/system/supports-colors.js b/deps/npm/node_modules/@colors/colors/lib/system/supports-colors.js similarity index 100% rename from deps/npm/node_modules/colors/lib/system/supports-colors.js rename to deps/npm/node_modules/@colors/colors/lib/system/supports-colors.js diff --git 
a/deps/npm/node_modules/colors/package.json b/deps/npm/node_modules/@colors/colors/package.json similarity index 68% rename from deps/npm/node_modules/colors/package.json rename to deps/npm/node_modules/@colors/colors/package.json index dbd71ba5a7756d..cb87f20953886a 100644 --- a/deps/npm/node_modules/colors/package.json +++ b/deps/npm/node_modules/@colors/colors/package.json @@ -1,16 +1,16 @@ { - "name": "colors", + "name": "@colors/colors", "description": "get colors in your node.js console", - "version": "1.4.0", - "author": "Marak Squires", + "version": "1.5.0", + "author": "DABH", "contributors": [ { "name": "DABH", "url": "https://github.com/DABH" } ], - "homepage": "https://github.com/Marak/colors.js", - "bugs": "https://github.com/Marak/colors.js/issues", + "homepage": "https://github.com/DABH/colors.js", + "bugs": "https://github.com/DABH/colors.js/issues", "keywords": [ "ansi", "terminal", @@ -18,12 +18,12 @@ ], "repository": { "type": "git", - "url": "http://github.com/Marak/colors.js.git" + "url": "http://github.com/DABH/colors.js.git" }, "license": "MIT", "scripts": { "lint": "eslint . --fix", - "test": "node tests/basic-test.js && node tests/safe-test.js" + "test": "export FORCE_COLOR=1 && node tests/basic-test.js && node tests/safe-test.js" }, "engines": { "node": ">=0.1.90" diff --git a/deps/npm/node_modules/colors/safe.d.ts b/deps/npm/node_modules/@colors/colors/safe.d.ts similarity index 100% rename from deps/npm/node_modules/colors/safe.d.ts rename to deps/npm/node_modules/@colors/colors/safe.d.ts diff --git a/deps/npm/node_modules/colors/safe.js b/deps/npm/node_modules/@colors/colors/safe.js similarity index 100% rename from deps/npm/node_modules/colors/safe.js rename to deps/npm/node_modules/@colors/colors/safe.js diff --git a/deps/npm/node_modules/colors/themes/generic-logging.js b/deps/npm/node_modules/@colors/colors/themes/generic-logging.js similarity index 100% rename from deps/npm/node_modules/colors/themes/generic-logging.js rename to deps/npm/node_modules/@colors/colors/themes/generic-logging.js diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index f3166c37e14753..55eb8292335d00 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -124,6 +124,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { globalStyle = false, idealTree = null, includeWorkspaceRoot = false, + installLinks = false, legacyPeerDeps = false, packageLock = true, strictPeerDeps = false, @@ -135,6 +136,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { this[_strictPeerDeps] = !!strictPeerDeps this.idealTree = idealTree + this.installLinks = installLinks this.legacyPeerDeps = legacyPeerDeps this[_usePackageLock] = packageLock @@ -410,6 +412,7 @@ Try using the package name instead, e.g: peer: false, optional: false, global: this[_global], + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, loadOverrides: true, }) @@ -424,6 +427,7 @@ Try using the package name instead, e.g: peer: false, optional: false, global: this[_global], + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, root, }) @@ -992,6 +996,7 @@ This is a one-time fix-up, please be patient... 
preferDedupe: this[_preferDedupe], legacyBundling: this[_legacyBundling], strictPeerDeps: this[_strictPeerDeps], + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, globalStyle: this[_globalStyle], })) @@ -1151,6 +1156,7 @@ This is a one-time fix-up, please be patient... const vr = new Node({ path: node.realpath, sourceReference: node, + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, overrides: node.overrides, }) @@ -1268,17 +1274,18 @@ This is a one-time fix-up, please be patient... // the object so it doesn't get mutated. // Don't bother to load the manifest for link deps, because the target // might be within another package that doesn't exist yet. - const { legacyPeerDeps } = this + const { installLinks, legacyPeerDeps } = this + const isWorkspace = this.idealTree.workspaces && this.idealTree.workspaces.has(spec.name) - // spec is a directory, link it - if (spec.type === 'directory') { + // spec is a directory, link it unless installLinks is set or it's a workspace + if (spec.type === 'directory' && (isWorkspace || !installLinks)) { return this[_linkFromSpec](name, spec, parent, edge) } // if the spec matches a workspace name, then see if the workspace node will // satisfy the edge. if it does, we return the workspace node to make sure it // takes priority. - if (this.idealTree.workspaces && this.idealTree.workspaces.has(spec.name)) { + if (isWorkspace) { const existingNode = this.idealTree.edgesOut.get(spec.name).to if (existingNode && existingNode.isWorkspace && existingNode.satisfies(edge)) { return edge.to @@ -1288,7 +1295,7 @@ This is a one-time fix-up, please be patient... // spec isn't a directory, and either isn't a workspace or the workspace we have // doesn't satisfy the edge. try to fetch a manifest and build a node from that. return this[_fetchManifest](spec) - .then(pkg => new Node({ name, pkg, parent, legacyPeerDeps }), error => { + .then(pkg => new Node({ name, pkg, parent, installLinks, legacyPeerDeps }), error => { error.requiredBy = edge.from.location || '.' // failed to load the spec, either because of enotarget or @@ -1298,6 +1305,7 @@ This is a one-time fix-up, please be patient... name, parent, error, + installLinks, legacyPeerDeps, }) this[_loadFailures].add(n) @@ -1307,9 +1315,9 @@ This is a one-time fix-up, please be patient... [_linkFromSpec] (name, spec, parent, edge) { const realpath = spec.fetchSpec - const { legacyPeerDeps } = this + const { installLinks, legacyPeerDeps } = this return rpj(realpath + '/package.json').catch(() => ({})).then(pkg => { - const link = new Link({ name, parent, realpath, pkg, legacyPeerDeps }) + const link = new Link({ name, parent, realpath, pkg, installLinks, legacyPeerDeps }) this[_linkNodes].add(link) return link }) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index b04fc88f65ccba..70b898141cc54c 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -283,6 +283,7 @@ module.exports = cls => class ActualLoader extends cls { .then(pkg => [pkg, null], error => [null, error]) .then(([pkg, error]) => { return this[normalize(path) === real ? 
_newNode : _newLink]({ + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, path, realpath: real, diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js index 8a41e7686e7e19..097e5fb84298ed 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js @@ -278,6 +278,7 @@ module.exports = cls => class VirtualLoader extends cls { const peer = sw.peer const node = new Node({ + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, root: this.virtualTree, path, @@ -304,6 +305,7 @@ module.exports = cls => class VirtualLoader extends cls { [loadLink] (location, targetLoc, target, meta) { const path = resolve(this.path, location) const link = new Link({ + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, path, realpath: resolve(this.path, targetLoc), diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js b/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js index 2c837ae8884485..c69ab557ae491a 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js @@ -53,9 +53,7 @@ const depValid = (child, requested, requestor) => { return semver.satisfies(child.version, requested.fetchSpec, true) case 'directory': - // directory must be a link to the specified folder - return !!child.isLink && - relative(child.realpath, requested.fetchSpec) === '' + return linkValid(child, requested, requestor) case 'file': return tarballValid(child, requested, requestor) @@ -108,6 +106,18 @@ const depValid = (child, requested, requestor) => { return false } +const linkValid = (child, requested, requestor) => { + const isLink = !!child.isLink + // if we're installing links and the node is a link, then it's invalid because we want + // a real node to be there + if (requestor.installLinks) { + return !isLink + } + + // directory must be a link to the specified folder + return isLink && relative(child.realpath, requested.fetchSpec) === '' +} + const tarballValid = (child, requested, requestor) => { if (child.isLink) { return false diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/node.js b/deps/npm/node_modules/@npmcli/arborist/lib/node.js index c79bc0bd3a00b3..60301798b918d4 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/node.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/node.js @@ -86,6 +86,7 @@ class Node { name, children, fsChildren, + installLinks = false, legacyPeerDeps = false, linksIn, hasShrinkwrap, @@ -152,6 +153,7 @@ class Node { } this.integrity = integrity || pkg._integrity || null this.hasShrinkwrap = hasShrinkwrap || pkg._hasShrinkwrap || false + this.installLinks = installLinks this.legacyPeerDeps = legacyPeerDeps this.children = new CaseInsensitiveMap() @@ -1149,6 +1151,9 @@ class Node { for (const kid of node.children.values()) { kid.parent = this } + if (node.isLink && node.target) { + node.target.root = null + } } if (!node.isRoot) { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js b/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js index c0cbe91fe3667f..9d84d3f1b08a5a 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js @@ -45,6 +45,7 @@ class PlaceDep { auditReport, legacyBundling, strictPeerDeps, + installLinks, legacyPeerDeps, globalStyle, } = parent || 
options @@ -56,6 +57,7 @@ class PlaceDep { auditReport, legacyBundling, strictPeerDeps, + installLinks, legacyPeerDeps, globalStyle, }) @@ -293,6 +295,7 @@ class PlaceDep { pkg: dep.package, resolved: dep.resolved, integrity: dep.integrity, + installLinks: this.installLinks, legacyPeerDeps: this.legacyPeerDeps, error: dep.errors[0], ...(dep.overrides ? { overrides: dep.overrides } : {}), diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 01e3db329ad505..bf3031fd066d5f 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,11 +1,11 @@ { "name": "@npmcli/arborist", - "version": "5.0.6", + "version": "5.1.1", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", "@npmcli/installed-package-contents": "^1.0.7", - "@npmcli/map-workspaces": "^2.0.0", + "@npmcli/map-workspaces": "^2.0.3", "@npmcli/metavuln-calculator": "^3.0.1", "@npmcli/move-file": "^2.0.0", "@npmcli/name-from-folder": "^1.0.1", @@ -13,7 +13,7 @@ "@npmcli/package-json": "^2.0.0", "@npmcli/run-script": "^3.0.0", "bin-links": "^3.0.0", - "cacache": "^16.0.0", + "cacache": "^16.0.6", "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", "json-stringify-nice": "^1.1.4", @@ -24,7 +24,7 @@ "npm-package-arg": "^9.0.0", "npm-pick-manifest": "^7.0.0", "npm-registry-fetch": "^13.0.0", - "npmlog": "^6.0.1", + "npmlog": "^6.0.2", "pacote": "^13.0.5", "parse-conflict-json": "^2.0.1", "proc-log": "^2.0.0", @@ -33,14 +33,14 @@ "read-package-json-fast": "^2.0.2", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", - "semver": "^7.3.5", + "semver": "^7.3.7", "ssri": "^9.0.0", "treeverse": "^2.0.0", "walk-up-path": "^1.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "benchmark": "^2.1.4", "chalk": "^4.1.0", "minify-registry-metadata": "^2.1.0", @@ -101,6 +101,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/@npmcli/map-workspaces/package.json b/deps/npm/node_modules/@npmcli/map-workspaces/package.json index 8ae823cf3e9b7b..3025081e5529b2 100644 --- a/deps/npm/node_modules/@npmcli/map-workspaces/package.json +++ b/deps/npm/node_modules/@npmcli/map-workspaces/package.json @@ -1,16 +1,19 @@ { "name": "@npmcli/map-workspaces", - "version": "2.0.2", + "version": "2.0.3", "main": "lib/index.js", "files": [ - "bin", - "lib" + "bin/", + "lib/" ], "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" }, "description": "Retrieves a name:pathname Map for a given workspaces config", - "repository": "https://github.com/npm/map-workspaces", + "repository": { + "type": "git", + "url": "https://github.com/npm/map-workspaces.git" + }, "keywords": [ "npm", "npmcli", @@ -22,33 +25,34 @@ "author": "GitHub Inc.", "license": "ISC", "scripts": { - "lint": "eslint '**/*.js'", + "lint": "eslint \"**/*.js\"", "pretest": "npm run lint", "test": "tap", "snap": "tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", - "postlint": "npm-template-check", + "postlint": "template-oss-check", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", - "template-copy": "npm-template-copy --force" + "template-oss-apply": "template-oss-apply --force" }, "tap": { "check-coverage": true }, "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "eslint": "^8.10.0", - "tap": "^15.1.6" + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.4.1", + "tap": "^16.0.1" }, "dependencies": { "@npmcli/name-from-folder": "^1.0.1", - "glob": "^7.2.0", + "glob": "^8.0.1", "minimatch": "^5.0.1", "read-package-json-fast": "^2.0.3" }, "templateOSS": { - "version": "2.9.2" + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "3.4.1" } } diff --git a/deps/npm/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/cacache/lib/verify.js index 300cd9f9de1c42..a39fb6ce1d1dc5 100644 --- a/deps/npm/node_modules/cacache/lib/verify.js +++ b/deps/npm/node_modules/cacache/lib/verify.js @@ -13,6 +13,8 @@ const path = require('path') const rimraf = util.promisify(require('rimraf')) const ssri = require('ssri') +const globify = pattern => pattern.split('\\').join('/') + const hasOwnProperty = (obj, key) => Object.prototype.hasOwnProperty.call(obj, key) @@ -119,7 +121,7 @@ function garbageCollect (cache, opts) { indexStream.on('end', resolve).on('error', reject) }).then(() => { const contentDir = contentPath.contentDir(cache) - return glob(path.join(contentDir, '**'), { + return glob(globify(path.join(contentDir, '**')), { follow: false, nodir: true, nosort: true, diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json index 3467f8af804edd..9eb646df76b40a 100644 --- a/deps/npm/node_modules/cacache/package.json +++ b/deps/npm/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "16.0.4", + "version": "16.0.6", "cache-version": { "content": "2", "index": "5" @@ -53,7 +53,7 @@ "@npmcli/move-file": "^2.0.0", "chownr": "^2.0.0", "fs-minipass": "^2.1.0", - "glob": "^7.2.0", + "glob": "^8.0.1", "infer-owner": "^1.0.4", "lru-cache": "^7.7.1", "minipass": "^3.1.6", @@ -70,7 +70,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.2.2", + "@npmcli/template-oss": "3.4.1", "benchmark": "^2.1.4", "chalk": "^4.1.2", "require-inject": "^1.4.4", @@ -87,7 +87,7 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "3.2.2" + "version": "3.4.1" }, "author": "GitHub Inc." } diff --git a/deps/npm/node_modules/cli-table3/index.d.ts b/deps/npm/node_modules/cli-table3/index.d.ts index aa94b7440d0ead..16980f848cc8ec 100644 --- a/deps/npm/node_modules/cli-table3/index.d.ts +++ b/deps/npm/node_modules/cli-table3/index.d.ts @@ -27,6 +27,7 @@ declare namespace CliTable3 { rowAligns: VerticalAlignment[]; head: string[]; wordWrap: boolean; + wrapOnWordBoundary: boolean; } interface TableInstanceOptions extends TableOptions { diff --git a/deps/npm/node_modules/cli-table3/package.json b/deps/npm/node_modules/cli-table3/package.json index 82a4905f6ec4e5..4e6689621968ca 100644 --- a/deps/npm/node_modules/cli-table3/package.json +++ b/deps/npm/node_modules/cli-table3/package.json @@ -1,6 +1,6 @@ { "name": "cli-table3", - "version": "0.6.1", + "version": "0.6.2", "description": "Pretty unicode tables for the command line. 
Based on the original cli-table.", "main": "index.js", "types": "index.d.ts", @@ -16,8 +16,8 @@ "string-width": "^4.2.0" }, "devDependencies": { - "ansi-256-colors": "^1.1.0", "cli-table": "^0.3.1", + "eslint": "^6.0.0", "eslint-config-prettier": "^6.0.0", "eslint-plugin-prettier": "^3.0.0", "jest": "^25.2.4", @@ -26,7 +26,7 @@ "prettier": "2.3.2" }, "optionalDependencies": { - "colors": "1.4.0" + "@colors/colors": "1.5.0" }, "scripts": { "changelog": "lerna-changelog", @@ -75,7 +75,7 @@ { "displayName": "test", "testMatch": [ - "/test/*.js" + "/test/**/*.js" ] }, { diff --git a/deps/npm/node_modules/cli-table3/src/cell.js b/deps/npm/node_modules/cli-table3/src/cell.js index b8da994a61a5a3..8f507442bb8fc9 100644 --- a/deps/npm/node_modules/cli-table3/src/cell.js +++ b/deps/npm/node_modules/cli-table3/src/cell.js @@ -1,3 +1,4 @@ +const { info, debug } = require('./debug'); const utils = require('./utils'); class Cell { @@ -30,12 +31,19 @@ class Cell { if (['boolean', 'number', 'string'].indexOf(typeof content) !== -1) { this.content = String(content); } else if (!content) { - this.content = ''; + this.content = this.options.href || ''; } else { throw new Error('Content needs to be a primitive, got: ' + typeof content); } this.colSpan = options.colSpan || 1; this.rowSpan = options.rowSpan || 1; + if (this.options.href) { + Object.defineProperty(this, 'href', { + get() { + return this.options.href; + }, + }); + } } mergeTableOptions(tableOptions, cells) { @@ -57,23 +65,35 @@ class Cell { this.head = style.head || tableStyle.head; this.border = style.border || tableStyle.border; - let fixedWidth = tableOptions.colWidths[this.x]; - if (tableOptions.wordWrap && fixedWidth) { - fixedWidth -= this.paddingLeft + this.paddingRight; + this.fixedWidth = tableOptions.colWidths[this.x]; + this.lines = this.computeLines(tableOptions); + + this.desiredWidth = utils.strlen(this.content) + this.paddingLeft + this.paddingRight; + this.desiredHeight = this.lines.length; + } + + computeLines(tableOptions) { + if (this.fixedWidth && (tableOptions.wordWrap || tableOptions.textWrap)) { + this.fixedWidth -= this.paddingLeft + this.paddingRight; if (this.colSpan) { let i = 1; while (i < this.colSpan) { - fixedWidth += tableOptions.colWidths[this.x + i]; + this.fixedWidth += tableOptions.colWidths[this.x + i]; i++; } } - this.lines = utils.colorizeLines(utils.wordWrap(fixedWidth, this.content)); - } else { - this.lines = utils.colorizeLines(this.content.split('\n')); + const { wrapOnWordBoundary = true } = tableOptions; + return this.wrapLines(utils.wordWrap(this.fixedWidth, this.content, wrapOnWordBoundary)); } + return this.wrapLines(this.content.split('\n')); + } - this.desiredWidth = utils.strlen(this.content) + this.paddingLeft + this.paddingRight; - this.desiredHeight = this.lines.length; + wrapLines(computedLines) { + const lines = utils.colorizeLines(computedLines); + if (this.href) { + return lines.map((line) => utils.hyperlink(this.href, line)); + } + return lines; } /** @@ -110,6 +130,12 @@ class Cell { draw(lineNum, spanningCell) { if (lineNum == 'top') return this.drawTop(this.drawRight); if (lineNum == 'bottom') return this.drawBottom(this.drawRight); + let content = utils.truncate(this.content, 10, this.truncate); + if (!lineNum) { + info(`${this.y}-${this.x}: ${this.rowSpan - lineNum}x${this.colSpan} Cell ${content}`); + } else { + // debug(`${lineNum}-${this.x}: 1x${this.colSpan} RowSpanCell ${content}`); + } let padLen = Math.max(this.height - this.lines.length, 0); let padTop; switch (this.vAlign) 
{ @@ -186,7 +212,7 @@ class Cell { wrapWithStyleColors(styleProperty, content) { if (this[styleProperty] && this[styleProperty].length) { try { - let colors = require('colors/safe'); + let colors = require('@colors/colors/safe'); for (let i = this[styleProperty].length - 1; i >= 0; i--) { colors = colors[this[styleProperty][i]]; } @@ -285,7 +311,10 @@ class ColSpanCell { */ constructor() {} - draw() { + draw(lineNum) { + if (typeof lineNum === 'number') { + debug(`${this.y}-${this.x}: 1x1 ColSpanCell`); + } return ''; } @@ -319,21 +348,26 @@ class RowSpanCell { if (lineNum == 'bottom') { return this.originalCell.draw('bottom'); } + debug(`${this.y}-${this.x}: 1x${this.colSpan} RowSpanCell for ${this.originalCell.content}`); return this.originalCell.draw(this.offset + 1 + lineNum); } mergeTableOptions() {} } +function firstDefined(...args) { + return args.filter((v) => v !== undefined && v !== null).shift(); +} + // HELPER FUNCTIONS function setOption(objA, objB, nameB, targetObj) { let nameA = nameB.split('-'); if (nameA.length > 1) { nameA[1] = nameA[1].charAt(0).toUpperCase() + nameA[1].substr(1); nameA = nameA.join(''); - targetObj[nameA] = objA[nameA] || objA[nameB] || objB[nameA] || objB[nameB]; + targetObj[nameA] = firstDefined(objA[nameA], objA[nameB], objB[nameA], objB[nameB]); } else { - targetObj[nameB] = objA[nameB] || objB[nameB]; + targetObj[nameB] = firstDefined(objA[nameB], objB[nameB]); } } @@ -366,6 +400,7 @@ let CHAR_NAMES = [ 'right-mid', 'middle', ]; + module.exports = Cell; module.exports.ColSpanCell = ColSpanCell; module.exports.RowSpanCell = RowSpanCell; diff --git a/deps/npm/node_modules/cli-table3/src/debug.js b/deps/npm/node_modules/cli-table3/src/debug.js new file mode 100644 index 00000000000000..6acfb030321597 --- /dev/null +++ b/deps/npm/node_modules/cli-table3/src/debug.js @@ -0,0 +1,28 @@ +let messages = []; +let level = 0; + +const debug = (msg, min) => { + if (level >= min) { + messages.push(msg); + } +}; + +debug.WARN = 1; +debug.INFO = 2; +debug.DEBUG = 3; + +debug.reset = () => { + messages = []; +}; + +debug.setDebugLevel = (v) => { + level = v; +}; + +debug.warn = (msg) => debug(msg, debug.WARN); +debug.info = (msg) => debug(msg, debug.INFO); +debug.debug = (msg) => debug(msg, debug.DEBUG); + +debug.debugMessages = () => messages; + +module.exports = debug; diff --git a/deps/npm/node_modules/cli-table3/src/layout-manager.js b/deps/npm/node_modules/cli-table3/src/layout-manager.js index fe84ef844da794..3937452274d721 100644 --- a/deps/npm/node_modules/cli-table3/src/layout-manager.js +++ b/deps/npm/node_modules/cli-table3/src/layout-manager.js @@ -1,24 +1,35 @@ +const { warn, debug } = require('./debug'); const Cell = require('./cell'); const { ColSpanCell, RowSpanCell } = Cell; (function () { + function next(alloc, col) { + if (alloc[col] > 0) { + return next(alloc, col + 1); + } + return col; + } + function layoutTable(table) { + let alloc = {}; table.forEach(function (row, rowIndex) { - let prevCell = null; - row.forEach(function (cell, columnIndex) { + let col = 0; + row.forEach(function (cell) { cell.y = rowIndex; - cell.x = prevCell ? prevCell.x + 1 : columnIndex; - for (let y = rowIndex; y >= 0; y--) { - let row2 = table[y]; - let xMax = y === rowIndex ? columnIndex : row2.length; - for (let x = 0; x < xMax; x++) { - let cell2 = row2[x]; - while (cellsConflict(cell, cell2)) { - cell.x++; - } + // Avoid erroneous call to next() on first row + cell.x = rowIndex ? 
next(alloc, col) : col; + const rowSpan = cell.rowSpan || 1; + const colSpan = cell.colSpan || 1; + if (rowSpan > 1) { + for (let cs = 0; cs < colSpan; cs++) { + alloc[cell.x + cs] = rowSpan; } - prevCell = cell; } + col = cell.x + colSpan; + }); + Object.keys(alloc).forEach((idx) => { + alloc[idx]--; + if (alloc[idx] < 1) delete alloc[idx]; }); }); } @@ -116,6 +127,7 @@ const { ColSpanCell, RowSpanCell } = Cell; function fillInTable(table) { let h_max = maxHeight(table); let w_max = maxWidth(table); + debug(`Max rows: ${h_max}; Max cols: ${w_max}`); for (let y = 0; y < h_max; y++) { for (let x = 0; x < w_max; x++) { if (!conflictExists(table, x, y)) { @@ -130,10 +142,10 @@ const { ColSpanCell, RowSpanCell } = Cell; opts.rowSpan++; y2++; } - let cell = new Cell(opts); cell.x = opts.x; cell.y = opts.y; + warn(`Missing cell at ${cell.y}-${cell.x}.`); insertCell(cell, table[y]); } } @@ -182,6 +194,7 @@ function makeComputeWidths(colSpan, desiredWidth, x, forcedMin) { return function (vals, table) { let result = []; let spanners = []; + let auto = {}; table.forEach(function (row) { row.forEach(function (cell) { if ((cell[colSpan] || 1) > 1) { @@ -205,12 +218,20 @@ function makeComputeWidths(colSpan, desiredWidth, x, forcedMin) { let col = cell[x]; let existingWidth = result[col]; let editableCols = typeof vals[col] === 'number' ? 0 : 1; - for (let i = 1; i < span; i++) { - existingWidth += 1 + result[col + i]; - if (typeof vals[col + i] !== 'number') { - editableCols++; + if (typeof existingWidth === 'number') { + for (let i = 1; i < span; i++) { + existingWidth += 1 + result[col + i]; + if (typeof vals[col + i] !== 'number') { + editableCols++; + } + } + } else { + existingWidth = desiredWidth === 'desiredWidth' ? cell.desiredWidth - 1 : 1; + if (!auto[col] || auto[col] < existingWidth) { + auto[col] = existingWidth; } } + if (cell[desiredWidth] > existingWidth) { let i = 0; while (editableCols > 0 && cell[desiredWidth] > existingWidth) { @@ -225,7 +246,7 @@ function makeComputeWidths(colSpan, desiredWidth, x, forcedMin) { } } - Object.assign(vals, result); + Object.assign(vals, result, auto); for (let j = 0; j < vals.length; j++) { vals[j] = Math.max(forcedMin, vals[j] || 0); } diff --git a/deps/npm/node_modules/cli-table3/src/table.js b/deps/npm/node_modules/cli-table3/src/table.js index 4fb33eccf64067..eb4a9bda9a3649 100644 --- a/deps/npm/node_modules/cli-table3/src/table.js +++ b/deps/npm/node_modules/cli-table3/src/table.js @@ -1,11 +1,38 @@ +const debug = require('./debug'); const utils = require('./utils'); const tableLayout = require('./layout-manager'); class Table extends Array { - constructor(options) { + constructor(opts) { super(); - this.options = utils.mergeOptions(options); + const options = utils.mergeOptions(opts); + Object.defineProperty(this, 'options', { + value: options, + enumerable: options.debug, + }); + + if (options.debug) { + switch (typeof options.debug) { + case 'boolean': + debug.setDebugLevel(debug.WARN); + break; + case 'number': + debug.setDebugLevel(options.debug); + break; + case 'string': + debug.setDebugLevel(parseInt(options.debug, 10)); + break; + default: + debug.setDebugLevel(debug.WARN); + debug.warn(`Debug option is expected to be boolean, number, or string. 
Received a ${typeof options.debug}`); + } + Object.defineProperty(this, 'messages', { + get() { + return debug.debugMessages(); + }, + }); + } } toString() { @@ -65,6 +92,8 @@ class Table extends Array { } } +Table.reset = () => debug.reset(); + function doDraw(row, lineNum, result) { let line = []; row.forEach(function (cell) { diff --git a/deps/npm/node_modules/cli-table3/src/utils.js b/deps/npm/node_modules/cli-table3/src/utils.js index 1e6254af984d0c..c922c5b9adb62c 100644 --- a/deps/npm/node_modules/cli-table3/src/utils.js +++ b/deps/npm/node_modules/cli-table3/src/utils.js @@ -240,6 +240,7 @@ function mergeOptions(options, defaults) { return ret; } +// Wrap on word boundary function wordWrap(maxLength, input) { let lines = []; let split = input.split(/(\s+)/g); @@ -270,11 +271,32 @@ function wordWrap(maxLength, input) { return lines; } -function multiLineWordWrap(maxLength, input) { +// Wrap text (ignoring word boundaries) +function textWrap(maxLength, input) { + let lines = []; + let line = ''; + function pushLine(str, ws) { + if (line.length && ws) line += ws; + line += str; + while (line.length > maxLength) { + lines.push(line.slice(0, maxLength)); + line = line.slice(maxLength); + } + } + let split = input.split(/(\s+)/g); + for (let i = 0; i < split.length; i += 2) { + pushLine(split[i], i && split[i - 1]); + } + if (line.length) lines.push(line); + return lines; +} + +function multiLineWordWrap(maxLength, input, wrapOnWordBoundary = true) { let output = []; input = input.split('\n'); + const handler = wrapOnWordBoundary ? wordWrap : textWrap; for (let i = 0; i < input.length; i++) { - output.push.apply(output, wordWrap(maxLength, input[i])); + output.push.apply(output, handler(maxLength, input[i])); } return output; } @@ -291,6 +313,17 @@ function colorizeLines(input) { return output; } +/** + * Credit: Matheus Sampaio https://github.com/matheussampaio + */ +function hyperlink(url, text) { + const OSC = '\u001B]'; + const BEL = '\u0007'; + const SEP = ';'; + + return [OSC, '8', SEP, SEP, url || text, BEL, text, OSC, '8', SEP, SEP, BEL].join(''); +} + module.exports = { strlen: strlen, repeat: repeat, @@ -299,4 +332,5 @@ module.exports = { mergeOptions: mergeOptions, wordWrap: multiLineWordWrap, colorizeLines: colorizeLines, + hyperlink, }; diff --git a/deps/npm/node_modules/glob/LICENSE b/deps/npm/node_modules/glob/LICENSE index 42ca266df1d523..39e8fe16f665ae 100644 --- a/deps/npm/node_modules/glob/LICENSE +++ b/deps/npm/node_modules/glob/LICENSE @@ -1,6 +1,6 @@ The ISC License -Copyright (c) Isaac Z. Schlueter and Contributors +Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above @@ -13,9 +13,3 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -## Glob Logo - -Glob's logo created by Tanya Brassie , licensed -under a Creative Commons Attribution-ShareAlike 4.0 International License -https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/deps/npm/node_modules/glob/common.js b/deps/npm/node_modules/glob/common.js index 8e363b6c1f16a1..fc193ee6fbda5d 100644 --- a/deps/npm/node_modules/glob/common.js +++ b/deps/npm/node_modules/glob/common.js @@ -110,6 +110,8 @@ function setopts (self, pattern, options) { // Note that they are not supported in Glob itself anyway. options.nonegate = true options.nocomment = true + // always treat \ in patterns as escapes, not path separators + options.allowWindowsEscape = true self.minimatch = new Minimatch(pattern, options) self.options = self.minimatch.options diff --git a/deps/npm/node_modules/glob/glob.js b/deps/npm/node_modules/glob/glob.js index afcf82752c390a..37a4d7e60775a3 100644 --- a/deps/npm/node_modules/glob/glob.js +++ b/deps/npm/node_modules/glob/glob.js @@ -342,7 +342,10 @@ Glob.prototype._process = function (pattern, index, inGlobStar, cb) { var read if (prefix === null) read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { if (!prefix || !isAbsolute(prefix)) prefix = '/' + prefix read = prefix diff --git a/deps/npm/node_modules/glob/package.json b/deps/npm/node_modules/glob/package.json index cc1a57a896e9eb..54940cbeb42080 100644 --- a/deps/npm/node_modules/glob/package.json +++ b/deps/npm/node_modules/glob/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. Schlueter (http://blog.izs.me/)", "name": "glob", "description": "a little globber", - "version": "7.2.0", + "version": "8.0.1", "repository": { "type": "git", "url": "git://github.com/isaacs/node-glob.git" @@ -14,13 +14,13 @@ "common.js" ], "engines": { - "node": "*" + "node": ">=12" }, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^5.0.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, @@ -28,12 +28,16 @@ "memfs": "^3.2.0", "mkdirp": "0", "rimraf": "^2.2.8", - "tap": "^15.0.6", + "tap": "^16.0.1", "tick": "0.0.6" }, "tap": { "before": "test/00-setup.js", "after": "test/zz-cleanup.js", + "statements": 90, + "branches": 90, + "functions": 90, + "lines": 90, "jobs": 1 }, "scripts": { diff --git a/deps/npm/node_modules/glob/sync.js b/deps/npm/node_modules/glob/sync.js index 4f46f90559a0ca..c705a9c0291dd9 100644 --- a/deps/npm/node_modules/glob/sync.js +++ b/deps/npm/node_modules/glob/sync.js @@ -109,7 +109,10 @@ GlobSync.prototype._process = function (pattern, index, inGlobStar) { var read if (prefix === null) read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? 
p : '[*]' + }).join('/'))) { if (!prefix || !isAbsolute(prefix)) prefix = '/' + prefix read = prefix diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index bb6837309821cd..2494ef0d9dd97d 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -20,7 +20,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "nock": "^13.2.4", "tap": "^16.0.1" }, @@ -46,6 +46,6 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json index 88968216f54ba0..20d7637724fceb 100644 --- a/deps/npm/node_modules/libnpmdiff/package.json +++ b/deps/npm/node_modules/libnpmdiff/package.json @@ -46,7 +46,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "tap": "^16.0.1" }, "dependencies": { @@ -61,6 +61,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmexec/lib/index.js b/deps/npm/node_modules/libnpmexec/lib/index.js index 81d152a20bd6e7..fbe5c5520c381e 100644 --- a/deps/npm/node_modules/libnpmexec/lib/index.js +++ b/deps/npm/node_modules/libnpmexec/lib/index.js @@ -9,15 +9,14 @@ const npmlog = require('npmlog') const mkdirp = require('mkdirp-infer-owner') const npa = require('npm-package-arg') const pacote = require('pacote') -const readPackageJson = require('read-package-json-fast') const cacheInstallDir = require('./cache-install-dir.js') const { fileExists, localFileExists } = require('./file-exists.js') const getBinFromManifest = require('./get-bin-from-manifest.js') -const manifestMissing = require('./manifest-missing.js') const noTTY = require('./no-tty.js') const runScript = require('./run-script.js') const isWindows = require('./is-windows.js') +const _localManifest = Symbol('localManifest') /* istanbul ignore next */ const PATH = ( @@ -86,20 +85,42 @@ const exec = async (opts) => { packages.push(args[0]) } + // figure out whether we need to install stuff, or if local is fine + const localArb = new Arborist({ + ...flatOptions, + path, + }) + const localTree = await localArb.loadActual() + + const getLocalManifest = ({ tree, name }) => { + // look up the package name in the current tree inventory, + // if it's found then return that normalized pkg data + const [node] = tree.inventory.query('packageName', name) + + if (node) { + return { + _id: node.pkgid, + ...node.package, + [_localManifest]: true, + } + } + } + // If we do `npm exec foo`, and have a `foo` locally, then we'll // always use that, so we don't really need to fetch the manifest. // So: run npa on each packages entry, and if it is a name with a - // rawSpec==='', then try to readPackageJson at - // node_modules/${name}/package.json, and only pacote fetch if - // that fails. + // rawSpec==='', then try to find that node name in the tree inventory + // and only pacote fetch if that fails. 
const manis = await Promise.all(packages.map(async p => { const spec = npa(p, path) if (spec.type === 'tag' && spec.rawSpec === '') { - // fall through to the pacote.manifest() approach - try { - const pj = resolve(path, 'node_modules', spec.name, 'package.json') - return await readPackageJson(pj) - } catch (er) {} + const localManifest = getLocalManifest({ + tree: localTree, + name: spec.name, + }) + if (localManifest) { + return localManifest + } } // Force preferOnline to true so we are making sure to pull in the latest // This is especially useful if the user didn't give us a version, and @@ -114,16 +135,9 @@ const exec = async (opts) => { args[0] = getBinFromManifest(manis[0]) } - // figure out whether we need to install stuff, or if local is fine - const localArb = new Arborist({ - ...flatOptions, - path, - }) - const localTree = await localArb.loadActual() - - // do we have all the packages in manifest list? + // are all packages from the manifest list installed? const needInstall = - manis.some(manifest => manifestMissing({ tree: localTree, manifest })) + manis.some(manifest => !manifest[_localManifest]) if (needInstall) { const { npxCache } = flatOptions @@ -135,16 +149,23 @@ const exec = async (opts) => { }) const tree = await arb.loadActual() + // inspect the npx-space installed tree to check if the package is already + // there, if that's the case also check that it's version matches the same + // version expected by the user requested pkg returned by pacote.manifest + const filterMissingPackagesFromInstallDir = (mani) => { + const localManifest = getLocalManifest({ tree, name: mani.name }) + if (localManifest) { + return localManifest.version !== mani.version + } + return true + } + // at this point, we have to ensure that we get the exact same // version, because it's something that has only ever been installed // by npm exec in the cache install directory - const add = manis.filter(mani => manifestMissing({ - tree, - manifest: { - ...mani, - _from: `${mani.name}@${mani.version}`, - }, - })) + const add = manis + .filter(mani => !mani[_localManifest]) + .filter(filterMissingPackagesFromInstallDir) .map(mani => mani._from) .sort((a, b) => a.localeCompare(b, 'en')) diff --git a/deps/npm/node_modules/libnpmexec/lib/manifest-missing.js b/deps/npm/node_modules/libnpmexec/lib/manifest-missing.js deleted file mode 100644 index aec1281e3a4bf9..00000000000000 --- a/deps/npm/node_modules/libnpmexec/lib/manifest-missing.js +++ /dev/null @@ -1,19 +0,0 @@ -const manifestMissing = ({ tree, manifest }) => { - // if the tree doesn't have a child by that name/version, return true - // true means we need to install it - const child = tree.children.get(manifest.name) - // if no child, we have to load it - if (!child) { - return true - } - - // if no version/tag specified, allow whatever's there - if (manifest._from === `${manifest.name}@`) { - return false - } - - // otherwise the version has to match what we WOULD get - return child.version !== manifest.version -} - -module.exports = manifestMissing diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index 72a1ee983520e9..f41df25140fb20 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ b/deps/npm/node_modules/libnpmexec/package.json @@ -1,6 +1,6 @@ { "name": "libnpmexec", - "version": "4.0.3", + "version": "4.0.5", "files": [ "bin/", "lib/" @@ -50,7 +50,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": 
"3.4.2", "bin-links": "^3.0.0", "tap": "^16.0.1" }, @@ -61,7 +61,7 @@ "chalk": "^4.1.0", "mkdirp-infer-owner": "^2.0.0", "npm-package-arg": "^9.0.1", - "npmlog": "^6.0.1", + "npmlog": "^6.0.2", "pacote": "^13.0.5", "proc-log": "^2.0.0", "read": "^1.0.7", @@ -70,6 +70,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json index fe8e8d8e37056d..9efee46511b540 100644 --- a/deps/npm/node_modules/libnpmfund/package.json +++ b/deps/npm/node_modules/libnpmfund/package.json @@ -45,7 +45,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "tap": "^16.0.1" }, "dependencies": { @@ -56,6 +56,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index 99efed490b3c4a..7219e36fcc3225 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -40,7 +40,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "nock": "^13.2.4", "tap": "^16.0.1" }, @@ -49,6 +49,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index be6086c115cf0b..56adfb6574c864 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -31,7 +31,7 @@ ], "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "minipass": "^3.1.1", "nock": "^13.2.4", "tap": "^16.0.1" @@ -52,6 +52,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json index 1aa1d306a412d1..4d7f9226cfbabb 100644 --- a/deps/npm/node_modules/libnpmpack/package.json +++ b/deps/npm/node_modules/libnpmpack/package.json @@ -26,7 +26,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "nock": "^13.0.7", "tap": "^16.0.1" }, @@ -47,6 +47,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmpublish/lib/unpublish.js b/deps/npm/node_modules/libnpmpublish/lib/unpublish.js index 91f5252aa33fcf..9b124c11435fbb 100644 --- a/deps/npm/node_modules/libnpmpublish/lib/unpublish.js +++ b/deps/npm/node_modules/libnpmpublish/lib/unpublish.js @@ -1,9 +1,26 @@ 'use strict' +const { URL } = require('url') const npa = require('npm-package-arg') const npmFetch = require('npm-registry-fetch') const semver = require('semver') -const { URL } = require('url') + +// given a tarball url and a registry url, returns just the +// relevant pathname portion of it, so that it can be handled +// elegantly by npm-registry-fetch which only expects pathnames +// and handles the registry hostname via opts +const getPathname = (tarball, registry) => { + const registryUrl = new URL(registry).pathname.slice(1) + let tarballUrl = new URL(tarball).pathname.slice(1) + + // test the tarball url to see if it starts with a possible + // pathname from the registry url, in that case strips that portion + // of it so that we only return the post-registry-url pathname + if (registryUrl) { + tarballUrl = tarballUrl.slice(registryUrl.length) + } + return tarballUrl +} const unpublish = async (spec, opts) => { spec = npa(spec) @@ -82,7 +99,7 @@ const unpublish = async (spec, opts) => { ...opts, query: { write: true }, }) - const tarballUrl = new URL(dist.tarball).pathname.slice(1) + const tarballUrl = getPathname(dist.tarball, opts.registry) await npmFetch(`${tarballUrl}/-rev/${_rev}`, { ...opts, method: 'DELETE', diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index ba6e72297074fc..fec6490d4771c2 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "6.0.3", + "version": "6.0.4", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", @@ -28,7 +28,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "libnpmpack": "^4.0.0", "lodash.clonedeep": "^4.5.0", "nock": "^13.2.4", @@ -45,7 +45,7 @@ "normalize-package-data": "^4.0.0", "npm-package-arg": "^9.0.1", "npm-registry-fetch": "^13.0.0", - "semver": "^7.1.3", + "semver": "^7.3.7", "ssri": "^9.0.0" }, "engines": { @@ -53,6 +53,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index 85918e2543a754..9c33fcaaa61cf3 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -29,7 +29,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "nock": "^13.2.4", "tap": "^16.0.1" }, @@ -48,6 +48,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
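To make the getPathname() helper added in libnpmpublish/lib/unpublish.js above concrete (the URLs below are made up for illustration): it strips the registry's own pathname prefix from the tarball URL, so npm-registry-fetch, which expects registry-relative pathnames and applies the registry host itself, is not handed a duplicated subpath.

    // illustrative values only
    const registry = 'https://registry.example.com/npm/'
    const tarball = 'https://registry.example.com/npm/foo/-/foo-1.0.0.tgz'
    // new URL(registry).pathname.slice(1) -> 'npm/'
    // new URL(tarball).pathname.slice(1)  -> 'npm/foo/-/foo-1.0.0.tgz'
    // getPathname(tarball, registry)      -> 'foo/-/foo-1.0.0.tgz'
    // for a registry served at the root (pathname '/'), the prefix is empty
    // and the tarball pathname is returned unchanged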
Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 52309e1646dd0d..80fb95a028958c 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ b/deps/npm/node_modules/libnpmteam/package.json @@ -19,7 +19,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "nock": "^13.2.4", "tap": "^16.0.1" }, @@ -42,6 +42,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json index 431587f07c4c5a..d374f3e392b5d7 100644 --- a/deps/npm/node_modules/libnpmversion/package.json +++ b/deps/npm/node_modules/libnpmversion/package.json @@ -1,6 +1,6 @@ { "name": "libnpmversion", - "version": "3.0.3", + "version": "3.0.4", "main": "lib/index.js", "files": [ "bin/", @@ -31,7 +31,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "require-inject": "^1.4.4", "tap": "^16.0.1" }, @@ -40,13 +40,13 @@ "@npmcli/run-script": "^3.0.0", "json-parse-even-better-errors": "^2.3.1", "proc-log": "^2.0.0", - "semver": "^7.3.5" + "semver": "^7.3.7" }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.3.2" + "version": "3.4.2" } } diff --git a/deps/npm/node_modules/lru-cache/index.js b/deps/npm/node_modules/lru-cache/index.js index 2168fd3a67bb41..b63be6e988835e 100644 --- a/deps/npm/node_modules/lru-cache/index.js +++ b/deps/npm/node_modules/lru-cache/index.js @@ -3,7 +3,7 @@ const perf = typeof performance === 'object' && performance && const hasAbortController = typeof AbortController !== 'undefined' -/* istanbul ignore next - minimal backwards compatibility polyfill */ +// minimal backwards-compatibility polyfill const AC = hasAbortController ? AbortController : Object.assign( class AbortController { constructor () { this.signal = new AC.AbortSignal } @@ -36,14 +36,20 @@ const deprecatedProperty = (field, instead) => { } } -const shouldWarn = code => typeof process === 'object' && - process && - !warned.has(code) +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) const warn = (code, what, instead, fn) => { warned.add(code) const msg = `The ${what} is deprecated. Please use ${instead} instead.` - process.emitWarning(msg, 'DeprecationWarning', code, fn) + emitWarning(msg, 'DeprecationWarning', code, fn) } const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) @@ -72,7 +78,10 @@ class ZeroArray extends Array { class Stack { constructor (max) { - const UintArray = max ? 
getUintArray(max) : Array + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) this.heap = new UintArray(max) this.length = 0 } @@ -92,6 +101,7 @@ class LRUCache { ttlResolution = 1, ttlAutopurge, updateAgeOnGet, + updateAgeOnHas, allowStale, dispose, disposeAfter, @@ -136,7 +146,6 @@ class LRUCache { throw new TypeError('fetchMethod must be a function if specified') } - this.keyMap = new Map() this.keyList = new Array(max).fill(null) this.valList = new Array(max).fill(null) @@ -170,6 +179,7 @@ class LRUCache { this.allowStale = !!allowStale || !!stale this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 ? ttlResolution : 1 this.ttlAutopurge = !!ttlAutopurge @@ -191,7 +201,7 @@ class LRUCache { warned.add(code) const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + 'result in unbounded memory consumption.' - process.emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) } } @@ -207,7 +217,7 @@ class LRUCache { } getRemainingTTL (key) { - return this.has(key) ? Infinity : 0 + return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 } initializeTTLTracking () { @@ -292,7 +302,7 @@ class LRUCache { this.sizes[index] = size const maxSize = this.maxSize - this.sizes[index] while (this.calculatedSize > maxSize) { - this.evict() + this.evict(true) } this.calculatedSize += this.sizes[index] } @@ -512,8 +522,8 @@ class LRUCache { if (this.size === 0) { return this.tail } - if (this.size === this.max) { - return this.evict() + if (this.size === this.max && this.max !== 0) { + return this.evict(false) } if (this.free.length !== 0) { return this.free.pop() @@ -525,12 +535,12 @@ class LRUCache { pop () { if (this.size) { const val = this.valList[this.head] - this.evict() + this.evict(true) return val } } - evict () { + evict (free) { const head = this.head const k = this.keyList[head] const v = this.valList[head] @@ -543,14 +553,29 @@ class LRUCache { } } this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } this.head = this.next[head] this.keyMap.delete(k) this.size -- return head } - has (k) { - return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k)) + has (k, { updateAgeOnHas = this.updateAgeOnHas } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + return true + } + } + return false } // like get(), but without any LRU updating or TTL expiration diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json index ca75abf0040219..32fb9da24e56e9 100644 --- a/deps/npm/node_modules/lru-cache/package.json +++ b/deps/npm/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.7.3", + "version": "7.8.1", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -23,6 +23,7 @@ "@size-limit/preset-small-lib": "^7.0.8", "benchmark": "^2.1.4", "clock-mock": "^1.0.4", + "heapdump": "^0.3.15", "size-limit": "^7.0.8", "tap": "^15.1.6" }, @@ -34,7 +35,10 @@ "node": ">=12" }, "tap": { - "coverage-map": "map.js" + "coverage-map": "map.js", + "node-arg": [ + "--expose-gc" + ] }, "size-limit": [ { diff --git a/deps/npm/node_modules/glob/node_modules/brace-expansion/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/brace-expansion/LICENSE similarity index 100% rename from deps/npm/node_modules/glob/node_modules/brace-expansion/LICENSE rename to deps/npm/node_modules/node-gyp/node_modules/brace-expansion/LICENSE diff --git a/deps/npm/node_modules/glob/node_modules/brace-expansion/index.js b/deps/npm/node_modules/node-gyp/node_modules/brace-expansion/index.js similarity index 100% rename from deps/npm/node_modules/glob/node_modules/brace-expansion/index.js rename to deps/npm/node_modules/node-gyp/node_modules/brace-expansion/index.js diff --git a/deps/npm/node_modules/glob/node_modules/brace-expansion/package.json b/deps/npm/node_modules/node-gyp/node_modules/brace-expansion/package.json similarity index 100% rename from deps/npm/node_modules/glob/node_modules/brace-expansion/package.json rename to deps/npm/node_modules/node-gyp/node_modules/brace-expansion/package.json diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/glob/LICENSE new file mode 100644 index 00000000000000..42ca266df1d523 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/LICENSE @@ -0,0 +1,21 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
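A short usage sketch for the lru-cache changes earlier in this patch (option values are illustrative): deprecation and unbounded-cache warnings now fall back to console.error when process.emitWarning is unavailable, and the new updateAgeOnHas option (also accepted per call) makes has() refresh an entry's TTL age the way updateAgeOnGet does for get().

    // illustrative only
    const LRUCache = require('lru-cache')
    const cache = new LRUCache({ max: 100, ttl: 1000, updateAgeOnHas: true })
    cache.set('a', 1)
    cache.has('a')                             // true, and resets the TTL clock
    cache.has('a', { updateAgeOnHas: false })  // true, without touching the TTL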
+ +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/common.js b/deps/npm/node_modules/node-gyp/node_modules/glob/common.js new file mode 100644 index 00000000000000..8e363b6c1f16a1 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/common.js @@ -0,0 +1,236 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = require("fs") +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? 
self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/glob.js b/deps/npm/node_modules/node-gyp/node_modules/glob/glob.js new file mode 100644 index 00000000000000..afcf82752c390a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/glob.js @@ -0,0 +1,787 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. 
This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
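The vendored glob copy above documents its matching algorithm in the Approach comment but not its calling convention; a minimal usage sketch (pattern and paths are illustrative):

    // illustrative only
    const glob = require('glob')
    // async form: the callback receives the sorted, de-duplicated match list
    glob('**/*.js', { cwd: '/some/project', nodir: true }, (er, files) => {
      if (er) throw er
      console.log(files)
    })
    // sync form
    const files = glob.sync('**/*.js', { cwd: '/some/project', nodir: true })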
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/package.json b/deps/npm/node_modules/node-gyp/node_modules/glob/package.json new file mode 100644 index 00000000000000..cc1a57a896e9eb --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/package.json @@ -0,0 +1,52 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.2.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "memfs": "^3.2.0", + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^15.0.6", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/sync.js b/deps/npm/node_modules/node-gyp/node_modules/glob/sync.js new file mode 100644 index 00000000000000..4f46f90559a0ca --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/sync.js @@ -0,0 +1,483 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. 
+ var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/deps/npm/node_modules/glob/node_modules/minimatch/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minimatch/LICENSE similarity index 100% rename from deps/npm/node_modules/glob/node_modules/minimatch/LICENSE rename to deps/npm/node_modules/node-gyp/node_modules/minimatch/LICENSE diff --git a/deps/npm/node_modules/glob/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/node-gyp/node_modules/minimatch/minimatch.js similarity index 100% rename from deps/npm/node_modules/glob/node_modules/minimatch/minimatch.js rename to deps/npm/node_modules/node-gyp/node_modules/minimatch/minimatch.js diff --git a/deps/npm/node_modules/glob/node_modules/minimatch/package.json b/deps/npm/node_modules/node-gyp/node_modules/minimatch/package.json similarity index 100% rename from deps/npm/node_modules/glob/node_modules/minimatch/package.json rename to deps/npm/node_modules/node-gyp/node_modules/minimatch/package.json diff --git a/deps/npm/node_modules/npm-packlist/lib/index.js b/deps/npm/node_modules/npm-packlist/lib/index.js index 30d99dc873f267..7e4093dfb39298 100644 --- a/deps/npm/node_modules/npm-packlist/lib/index.js +++ b/deps/npm/node_modules/npm-packlist/lib/index.js @@ -31,6 +31,7 @@ const packageMustHavesRE = new RegExp(`^(${packageMustHaveFileNames})(\\..*[^~$] const fs = require('fs') const glob = require('glob') +const globify = pattern => pattern.split('\\').join('/') const pathHasPkg = (input) => { if (!input.startsWith('node_modules/')) { @@ -428,7 +429,7 @@ class Walker extends IgnoreWalker { } globFiles (pattern, cb) { - glob(pattern, { dot: true, cwd: this.path, nocase: true }, cb) + glob(globify(pattern), { dot: true, cwd: this.path, nocase: true }, cb) } readPackageJson (entries) { diff --git a/deps/npm/node_modules/npm-packlist/package.json b/deps/npm/node_modules/npm-packlist/package.json index 632524d789ca81..ab5e46359d09b6 100644 --- a/deps/npm/node_modules/npm-packlist/package.json +++ b/deps/npm/node_modules/npm-packlist/package.json @@ -1,13 +1,13 @@ { "name": "npm-packlist", - "version": "5.0.0", + "version": "5.0.2", "description": "Get a list of the files to add from a folder into an npm package", "directories": { "test": "test" }, "main": "lib", "dependencies": { - "glob": "^7.2.0", + "glob": "^8.0.1", "ignore-walk": "^5.0.1", "npm-bundled": "^1.1.2", "npm-normalize-package-bin": "^1.0.1" @@ -20,7 +20,7 @@ ], "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.2.2", + "@npmcli/template-oss": "3.4.1", "mutate-fs": "^2.1.1", "tap": "^16.0.1" }, @@ -56,6 +56,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
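Regarding the npm-packlist change above: globify() rewrites Windows-style backslash separators to the forward slashes glob expects, since backslash acts as an escape character in glob patterns rather than a path separator. A quick illustration with a made-up pattern:

    // illustrative only
    const globify = pattern => pattern.split('\\').join('/')
    globify('lib\\**\\*.js')   // -> 'lib/**/*.js'
    globify('lib/**/*.js')     // already POSIX-style, returned unchanged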
Edits may be overwritten.", - "version": "3.2.2" + "version": "3.4.1" } } diff --git a/deps/npm/node_modules/npm-profile/package.json b/deps/npm/node_modules/npm-profile/package.json index 68b04bba6d9006..5e8f2d2391abd6 100644 --- a/deps/npm/node_modules/npm-profile/package.json +++ b/deps/npm/node_modules/npm-profile/package.json @@ -1,27 +1,28 @@ { "name": "npm-profile", - "version": "6.0.2", + "version": "6.0.3", "description": "Library for updating an npmjs.com profile", "keywords": [], "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "npm-registry-fetch": "^13.0.0", + "npm-registry-fetch": "^13.0.1", "proc-log": "^2.0.0" }, "main": "./lib/index.js", "repository": { "type": "git", - "url": "git+https://github.com/npm/npm-profile.git" + "url": "https://github.com/npm/npm-profile.git" }, "files": [ - "bin", - "lib" + "bin/", + "lib/" ], "devDependencies": { - "@npmcli/template-oss": "^2.7.1", + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.4.1", "nock": "^13.2.4", - "tap": "^15.1.6" + "tap": "^16.0.1" }, "scripts": { "preversion": "npm test", @@ -30,18 +31,19 @@ "posttest": "npm run lint", "test": "tap", "snap": "tap", - "lint": "eslint '**/*.js'", - "postlint": "npm-template-check", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", "lintfix": "npm run lint -- --fix", - "template-copy": "npm-template-copy --force" + "template-oss-apply": "template-oss-apply --force" }, "tap": { "check-coverage": true }, "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" }, "templateOSS": { - "version": "2.7.1" + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "3.4.1" } } diff --git a/deps/npm/node_modules/npm-registry-fetch/lib/errors.js b/deps/npm/node_modules/npm-registry-fetch/lib/errors.js index 0efc923e3e900f..cf5ddba6f300cb 100644 --- a/deps/npm/node_modules/npm-registry-fetch/lib/errors.js +++ b/deps/npm/node_modules/npm-registry-fetch/lib/errors.js @@ -4,7 +4,7 @@ const url = require('url') function packageName (href) { try { - let basePath = new url.URL(href).pathname.substr(1) + let basePath = new url.URL(href).pathname.slice(1) if (!basePath.match(/^-/)) { basePath = basePath.split('/') var index = basePath.indexOf('_rewrite') diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json index 9e15f627cd5a73..0ce12c633637a6 100644 --- a/deps/npm/node_modules/npm-registry-fetch/package.json +++ b/deps/npm/node_modules/npm-registry-fetch/package.json @@ -1,6 +1,6 @@ { "name": "npm-registry-fetch", - "version": "13.1.0", + "version": "13.1.1", "description": "Fetch-based http client for use with npm registry APIs", "main": "lib", "files": [ @@ -44,12 +44,12 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.1.2", + "@npmcli/template-oss": "3.3.2", "cacache": "^16.0.2", "nock": "^13.2.4", "require-inject": "^1.4.4", - "ssri": "^8.0.1", - "tap": "^15.1.6" + "ssri": "^9.0.0", + "tap": "^16.0.1" }, "tap": { "check-coverage": true, @@ -60,6 +60,6 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "3.1.2" + "version": "3.3.2" } } diff --git a/deps/npm/node_modules/npmlog/package.json b/deps/npm/node_modules/npmlog/package.json index cf38f7fca475ca..bdb5a384781ce8 100644 --- a/deps/npm/node_modules/npmlog/package.json +++ b/deps/npm/node_modules/npmlog/package.json @@ -2,48 +2,50 @@ "author": "GitHub Inc.", "name": "npmlog", "description": "logger for npm", - "version": "6.0.1", + "version": "6.0.2", "repository": { "type": "git", "url": "https://github.com/npm/npmlog.git" }, "main": "lib/log.js", "files": [ - "bin", - "lib" + "bin/", + "lib/" ], "scripts": { "test": "tap", "npmclilint": "npmcli-lint", - "lint": "eslint '**/*.js'", + "lint": "eslint \"**/*.js\"", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", - "postlint": "npm-template-check", + "postlint": "template-oss-check", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "snap": "tap", - "template-copy": "npm-template-copy --force" + "template-oss-apply": "template-oss-apply --force" }, "dependencies": { "are-we-there-yet": "^3.0.0", "console-control-strings": "^1.1.0", - "gauge": "^4.0.0", + "gauge": "^4.0.3", "set-blocking": "^2.0.0" }, "devDependencies": { - "@npmcli/template-oss": "^2.7.1", - "tap": "^15.1.6" + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.4.1", + "tap": "^16.0.1" }, "license": "ISC", "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" }, "tap": { "branches": 95 }, "templateOSS": { - "version": "2.7.1" + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "3.4.1" } } diff --git a/deps/npm/node_modules/read-package-json/lib/read-json.js b/deps/npm/node_modules/read-package-json/lib/read-json.js index d0ee9af1ae7505..c55eca32259edf 100644 --- a/deps/npm/node_modules/read-package-json/lib/read-json.js +++ b/deps/npm/node_modules/read-package-json/lib/read-json.js @@ -109,10 +109,10 @@ function parseJson (file, er, d, log, strict, cb) { delete data[key] } } - } catch (er) { + } catch (jsonErr) { data = parseIndex(d) if (!data) { - return cb(parseError(er, file)) + return cb(parseError(jsonErr, file)) } } @@ -120,11 +120,11 @@ function parseJson (file, er, d, log, strict, cb) { } function extrasCached (file, d, data, log, strict, cb) { - extras(file, data, log, strict, function (err, data) { + extras(file, data, log, strict, function (err, extrasData) { if (!err) { - cache[d] = jsonClone(data) + cache[d] = jsonClone(extrasData) } - cb(err, data) + cb(err, extrasData) }) } @@ -299,8 +299,8 @@ function readme (file, data, cb) { return cb(er) } // don't accept directories. - files = files.filter(function (file) { - return !file.match(/\/$/) + files = files.filter(function (filtered) { + return !filtered.match(/\/$/) }) if (!files.length) { return cb() @@ -328,12 +328,12 @@ function preferMarkdownReadme (files) { function readme_ (file, data, rm, cb) { var rmfn = path.basename(rm) - fs.readFile(rm, 'utf8', function (er, rm) { + fs.readFile(rm, 'utf8', function (er, rmData) { // maybe not readable, or something. 
if (er) { return cb() } - data.readme = rm + data.readme = rmData data.readmeFilename = rmfn return cb(er, data) }) @@ -346,11 +346,11 @@ function mans (file, data, cb) { } const dirname = path.dirname(file) cwd = path.resolve(path.dirname(file), cwd) - glob('**/*.[0-9]', { cwd }, function (er, mans) { + glob('**/*.[0-9]', { cwd }, function (er, mansGlob) { if (er) { return cb(er) } - data.man = mans.map(man => + data.man = mansGlob.map(man => path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/') ) return cb(null, data) @@ -366,17 +366,17 @@ function bins (file, data, cb) { } m = path.resolve(path.dirname(file), m) - glob('**', { cwd: m }, function (er, bins) { + glob('**', { cwd: m }, function (er, binsGlob) { if (er) { return cb(er) } - bins_(file, data, bins, cb) + bins_(file, data, binsGlob, cb) }) } -function bins_ (file, data, bins, cb) { +function bins_ (file, data, binsGlob, cb) { var m = (data.directories && data.directories.bin) || '.' - data.bin = bins.reduce(function (acc, mf) { + data.bin = binsGlob.reduce(function (acc, mf) { if (mf && mf.charAt(0) !== '.') { var f = path.basename(mf) acc[f] = path.join(m, mf) @@ -412,7 +412,7 @@ function githead (file, data, cb) { } var dir = path.dirname(file) var head = path.resolve(dir, '.git/HEAD') - fs.readFile(head, 'utf8', function (er, head) { + fs.readFile(head, 'utf8', function (er, headData) { if (er) { var parent = path.dirname(dir) if (parent === dir) { @@ -420,7 +420,7 @@ function githead (file, data, cb) { } return githead(dir, data, cb) } - githead_(data, dir, head, cb) + githead_(data, dir, headData, cb) }) } @@ -431,11 +431,11 @@ function githead_ (data, dir, head, cb) { } var headRef = head.replace(/^ref: /, '').trim() var headFile = path.resolve(dir, '.git', headRef) - fs.readFile(headFile, 'utf8', function (er, head) { - if (er || !head) { + fs.readFile(headFile, 'utf8', function (er, headData) { + if (er || !headData) { var packFile = path.resolve(dir, '.git/packed-refs') - return fs.readFile(packFile, 'utf8', function (er, refs) { - if (er || !refs) { + return fs.readFile(packFile, 'utf8', function (readFileErr, refs) { + if (readFileErr || !refs) { return cb(null, data) } refs = refs.split('\n') @@ -449,8 +449,8 @@ function githead_ (data, dir, head, cb) { return cb(null, data) }) } - head = head.replace(/^ref: /, '').trim() - data.gitHead = head + headData = headData.replace(/^ref: /, '').trim() + data.gitHead = headData return cb(null, data) }) } diff --git a/deps/npm/node_modules/read-package-json/package.json b/deps/npm/node_modules/read-package-json/package.json index 038047c9709418..8bb77ca01f6537 100644 --- a/deps/npm/node_modules/read-package-json/package.json +++ b/deps/npm/node_modules/read-package-json/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json", - "version": "5.0.0", + "version": "5.0.1", "author": "GitHub Inc.", "description": "The thing npm uses to read package.json files with semantics and defaults and validation", "repository": { @@ -14,34 +14,35 @@ "release": "standard-version -s", "test": "tap", "npmclilint": "npmcli-lint", - "lint": "eslint '**/*.js'", + "lint": "eslint \"**/*.js\"", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", - "postlint": "npm-template-check", - "template-copy": "npm-template-copy --force", + "postlint": "template-oss-check", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", - "snap": "tap" + "snap": "tap", + "template-oss-apply": 
"template-oss-apply --force" }, "dependencies": { - "glob": "^7.2.0", + "glob": "^8.0.1", "json-parse-even-better-errors": "^2.3.1", "normalize-package-data": "^4.0.0", "npm-normalize-package-bin": "^1.0.1" }, "devDependencies": { - "@npmcli/template-oss": "^2.9.2", - "tap": "^15.0.9" + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.4.1", + "tap": "^16.0.1" }, "license": "ISC", "files": [ - "bin", - "lib" + "bin/", + "lib/" ], "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16" + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" }, "tap": { "branches": 68, @@ -50,6 +51,7 @@ "statements": 77 }, "templateOSS": { - "version": "2.9.2" + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "3.4.1" } } diff --git a/deps/npm/node_modules/rimraf/node_modules/brace-expansion/LICENSE b/deps/npm/node_modules/rimraf/node_modules/brace-expansion/LICENSE new file mode 100644 index 00000000000000..de3226673c3874 --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/deps/npm/node_modules/rimraf/node_modules/brace-expansion/index.js b/deps/npm/node_modules/rimraf/node_modules/brace-expansion/index.js new file mode 100644 index 00000000000000..2b6f4f85c951fc --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/brace-expansion/index.js @@ -0,0 +1,200 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} diff --git a/deps/npm/node_modules/rimraf/node_modules/brace-expansion/package.json b/deps/npm/node_modules/rimraf/node_modules/brace-expansion/package.json new file mode 100644 index 00000000000000..a18faa8fd67b82 --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/LICENSE b/deps/npm/node_modules/rimraf/node_modules/glob/LICENSE new file mode 100644 index 00000000000000..42ca266df1d523 --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/glob/LICENSE @@ -0,0 +1,21 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/common.js b/deps/npm/node_modules/rimraf/node_modules/glob/common.js new file mode 100644 index 00000000000000..8e363b6c1f16a1 --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/glob/common.js @@ -0,0 +1,236 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = require("fs") +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? 
self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/glob.js b/deps/npm/node_modules/rimraf/node_modules/glob/glob.js new file mode 100644 index 00000000000000..afcf82752c390a --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/glob/glob.js @@ -0,0 +1,787 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. 
+// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
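+  // For example, a caller doing
+  //   glob('src/**/*.js', { nodir: true }, function (er, files) { /* ... */ })
+  // ends up with one slot here per pattern in the minimatch set; each slot is
+  // an object keyed by matched path, which is what prunes the duplicates.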
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
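+  // For example, 'a/b/*.js' gives a set entry of ['a', 'b', <regexp for *.js>],
+  // so n ends up as 2 and the prefix computed below is the literal 'a/b'.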
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
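+  // e.g. for 'src/*/index.js', each directory entry matched by '*' is pushed
+  // back through _process() with the remaining portions ('index.js') appended.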
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
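+      // readdir() failing with ENOTDIR (or ENOTSUP) still tells us the path
+      // exists, just not as a directory, so it is cached as a plain FILE below.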
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
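+  // 'Simple' here means the pattern has no magic portions left at all, so
+  // matching reduces to a single stat of the literal path.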
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/package.json b/deps/npm/node_modules/rimraf/node_modules/glob/package.json new file mode 100644 index 00000000000000..cc1a57a896e9eb --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/glob/package.json @@ -0,0 +1,52 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.2.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "memfs": "^3.2.0", + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^15.0.6", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/deps/npm/node_modules/rimraf/node_modules/glob/sync.js b/deps/npm/node_modules/rimraf/node_modules/glob/sync.js new file mode 100644 index 00000000000000..4f46f90559a0ca --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/glob/sync.js @@ -0,0 +1,483 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. 
+ var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
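+  // e.g. for 'a/**/b' with entries ['x', 'y'] this tries 'a/b', then
+  // 'a/x/b' and 'a/x/**/b', then 'a/y/b' and 'a/y/**/b'.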
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/deps/npm/node_modules/rimraf/node_modules/minimatch/LICENSE b/deps/npm/node_modules/rimraf/node_modules/minimatch/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/rimraf/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/rimraf/node_modules/minimatch/minimatch.js new file mode 100644 index 00000000000000..fda45ade7cfc35 --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/minimatch/minimatch.js @@ -0,0 +1,947 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return require('path') } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. 
+var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? 
'' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. 
if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. 
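A brief, hedged illustration of the extglob negation this negativeLists pass compiles, using the public entry point defined earlier in this file; the file names are invented.

var minimatch = require('minimatch')             // the module this file exports
minimatch('foo.txt', '*.!(js)')                  // true  - 'txt' is not rejected by !(js)
minimatch('foo.js', '*.!(js)')                   // false - the compiled negative lookahead refuses 'js'
minimatch('foo.js', '*.!(js)', { noext: true })  // extglobs disabled: '!', '(' and ')' become literal characters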
+ for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' 
+ re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. 
+ // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. 
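A hedged end-to-end sketch of the globstar walk documented above, reusing the cases those comments already enumerate; nothing here is new behaviour.

var Minimatch = require('minimatch').Minimatch   // the class this file exports
var mm = new Minimatch('a/**/b/**/c')
mm.match('a/b/x/y/z/c')   // true  - each ** swallows zero or more segments
mm.match('a/b/c')         // true  - both ** swallow nothing at all
mm.match('a/x/.y/b/c')    // false - ** never swallows a dot segment unless { dot: true } is set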
+ /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/deps/npm/node_modules/rimraf/node_modules/minimatch/package.json b/deps/npm/node_modules/rimraf/node_modules/minimatch/package.json new file mode 100644 index 00000000000000..566efdfe45cb80 --- /dev/null +++ b/deps/npm/node_modules/rimraf/node_modules/minimatch/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.1.2", + "publishConfig": { + "tag": "v3-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/deps/npm/node_modules/semver/bin/semver.js b/deps/npm/node_modules/semver/bin/semver.js index 779b8b0cdc2aa0..8d1b55720e0ab7 100755 --- a/deps/npm/node_modules/semver/bin/semver.js +++ b/deps/npm/node_modules/semver/bin/semver.js @@ -37,8 +37,9 @@ const main = () => { let a = argv.shift() const indexOfEqualSign = a.indexOf('=') if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) a = a.slice(0, indexOfEqualSign) - argv.unshift(a.slice(indexOfEqualSign + 1)) + argv.unshift(value) } switch (a) { case '-rv': case '-rev': case '--rev': case '--reverse': diff --git a/deps/npm/node_modules/semver/classes/semver.js b/deps/npm/node_modules/semver/classes/semver.js index ed81a7ec6cbfe6..af62955194793c 100644 --- a/deps/npm/node_modules/semver/classes/semver.js +++ b/deps/npm/node_modules/semver/classes/semver.js @@ -265,7 +265,7 @@ class SemVer { if (identifier) { // 1.2.0-beta.1 bumps to 1.2.0-beta.2, // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { if (isNaN(this.prerelease[1])) { this.prerelease = [identifier, 0] } diff --git a/deps/npm/node_modules/semver/functions/inc.js b/deps/npm/node_modules/semver/functions/inc.js index aa4d83ab4c2895..62d1da2c4093bf 100644 --- a/deps/npm/node_modules/semver/functions/inc.js +++ b/deps/npm/node_modules/semver/functions/inc.js @@ -7,7 +7,10 @@ const inc = (version, release, options, identifier) => { } try { - return new SemVer(version, options).inc(release, identifier).version + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier).version } catch (er) { return null } diff --git a/deps/npm/node_modules/semver/node_modules/lru-cache/LICENSE b/deps/npm/node_modules/semver/node_modules/lru-cache/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/semver/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/semver/node_modules/lru-cache/index.js b/deps/npm/node_modules/semver/node_modules/lru-cache/index.js new file mode 100644 index 00000000000000..573b6b85b9779d --- /dev/null +++ b/deps/npm/node_modules/semver/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. 
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. 
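A hedged usage sketch of the size accounting handled in set() below, using the LRUCache class defined in this file; the keys, values and limits are made up.

const LRUCache = require('lru-cache')              // the class this file exports
const cache = new LRUCache({ max: 100, length: (value) => value.length })
cache.set('big', 'x'.repeat(200))   // false - length 200 exceeds max, the entry is rejected
cache.set('ok', 'hello')            // true  - counts 5 toward the tracked total length
cache.get('big')                    // undefined
cache.get('ok')                     // 'hello'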
+ if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/deps/npm/node_modules/semver/node_modules/lru-cache/package.json b/deps/npm/node_modules/semver/node_modules/lru-cache/package.json new file mode 100644 index 00000000000000..43b7502c3e7c79 --- /dev/null +++ b/deps/npm/node_modules/semver/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "6.0.0", + "author": "Isaac Z. 
Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json index b04e0d328268fd..7898f5902cb737 100644 --- a/deps/npm/node_modules/semver/package.json +++ b/deps/npm/node_modules/semver/package.json @@ -1,6 +1,6 @@ { "name": "semver", - "version": "7.3.6", + "version": "7.3.7", "description": "The semantic version parser used by npm.", "main": "index.js", "scripts": { @@ -18,7 +18,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.2.2", + "@npmcli/template-oss": "3.3.2", "tap": "^16.0.0" }, "license": "ISC", @@ -44,15 +44,16 @@ "coverage-map": "map.js" }, "engines": { - "node": "^10.0.0 || ^12.0.0 || ^14.0.0 || >=16.0.0" + "node": ">=10" }, "dependencies": { - "lru-cache": "^7.4.0" + "lru-cache": "^6.0.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.2.2", + "version": "3.3.2", + "engines": ">=10", "ciVersions": [ "10.0.0", "10.x", diff --git a/deps/npm/package.json b/deps/npm/package.json index da2cba63e220a1..e1f683219311aa 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "8.7.0", + "version": "8.8.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -60,19 +60,19 @@ "@npmcli/ci-detect": "^2.0.0", "@npmcli/config": "^4.1.0", "@npmcli/fs": "^2.1.0", - "@npmcli/map-workspaces": "^2.0.2", + "@npmcli/map-workspaces": "^2.0.3", "@npmcli/package-json": "^2.0.0", "@npmcli/run-script": "^3.0.1", "abbrev": "~1.1.1", "archy": "~1.0.0", - "cacache": "^16.0.4", + "cacache": "^16.0.6", "chalk": "^4.1.2", "chownr": "^2.0.0", "cli-columns": "^4.0.0", - "cli-table3": "^0.6.1", + "cli-table3": "^0.6.2", "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.12", - "glob": "^7.2.0", + "glob": "^8.0.1", "graceful-fs": "^4.2.10", "hosted-git-info": "^5.0.0", "ini": "^3.0.0", @@ -102,21 +102,21 @@ "npm-install-checks": "^5.0.0", "npm-package-arg": "^9.0.2", "npm-pick-manifest": "^7.0.1", - "npm-profile": "^6.0.2", - "npm-registry-fetch": "^13.1.0", + "npm-profile": "^6.0.3", + "npm-registry-fetch": "^13.1.1", "npm-user-validate": "^1.0.1", - "npmlog": "^6.0.1", + "npmlog": "^6.0.2", "opener": "^1.5.2", "pacote": "^13.1.1", "parse-conflict-json": "^2.0.2", "proc-log": "^2.0.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", - "read-package-json": "^5.0.0", + "read-package-json": "^5.0.1", "read-package-json-fast": "^2.0.3", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", - "semver": "^7.3.6", + "semver": "^7.3.7", "ssri": "^9.0.0", "tar": "^6.1.11", "text-table": "~0.2.0", @@ -200,7 +200,7 @@ ], "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.3.2", + "@npmcli/template-oss": "3.4.2", "licensee": "^8.2.0", "nock": "^13.2.4", "spawk": "^1.7.1", @@ -236,7 +236,7 @@ "templateOSS": { "rootRepo": false, "rootModule": false, - "version": "3.3.2" + "version": "3.4.2" }, "license": 
"Artistic-2.0", "engines": { diff --git a/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs index d98c16f7905a5b..c3680933e6a793 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs @@ -5,7 +5,7 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' -exports[`test/lib/commands/audit.js TAP audit fix > lockfile has test-dep-a@1.0.1 1`] = ` +exports[`test/lib/commands/audit.js TAP audit fix - bulk endpoint > lockfile has test-dep-a@1.0.1 1`] = ` { "name": "test-dep", "version": "1.0.0", @@ -34,13 +34,28 @@ exports[`test/lib/commands/audit.js TAP audit fix > lockfile has test-dep-a@1.0. ` -exports[`test/lib/commands/audit.js TAP audit fix > must match snapshot 1`] = ` +exports[`test/lib/commands/audit.js TAP audit fix - bulk endpoint > must match snapshot 1`] = ` added 1 package, and audited 2 packages in xxx found 0 vulnerabilities ` +exports[`test/lib/commands/audit.js TAP fallback audit > must match snapshot 1`] = ` +# npm audit report + +test-dep-a 1.0.0 +Severity: high +Test advisory 100 - https://github.com/advisories/GHSA-100 +fix available via \`npm audit fix\` +node_modules/test-dep-a + +1 high severity vulnerability + +To address all issues, run: + npm audit fix +` + exports[`test/lib/commands/audit.js TAP json audit > must match snapshot 1`] = ` { "auditReportVersion": 2, @@ -98,14 +113,14 @@ exports[`test/lib/commands/audit.js TAP json audit > must match snapshot 1`] = ` exports[`test/lib/commands/audit.js TAP normal audit > must match snapshot 1`] = ` # npm audit report -test-dep-a * +test-dep-a 1.0.0 Severity: high Test advisory 100 - https://github.com/advisories/GHSA-100 -No fix available +fix available via \`npm audit fix\` node_modules/test-dep-a 1 high severity vulnerability -Some issues need review, and may require choosing -a different dependency. 
+To address all issues, run: + npm audit fix ` diff --git a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs index 444d1f1db7c701..896991c32d99c3 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs @@ -81,6 +81,7 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna "init.license": "ISC", "init.module": "{HOME}/.npm-init.js", "init.version": "1.0.0", + "install-links": false, "key": null, "legacy-bundling": false, "legacy-peer-deps": false, @@ -234,6 +235,7 @@ init.author.url = "" init.license = "ISC" init.module = "{HOME}/.npm-init.js" init.version = "1.0.0" +install-links = false json = false key = null legacy-bundling = false diff --git a/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs index 9c9e31b32db6cc..1c8d3e59c3fe19 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/ls.js.test.cjs @@ -114,23 +114,6 @@ test-npm-ls@1.0.0 ` -exports[`test/lib/commands/ls.js TAP ls --only=development > should output tree containing only development deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---only-development -\`-- dev-dep@1.0.0 - \`-- foo@1.0.0 - \`-- dog@1.0.0 - -` - -exports[`test/lib/commands/ls.js TAP ls --only=prod > should output tree containing only prod deps 1`] = ` -test-npm-ls@1.0.0 {CWD}/tap-testdir-ls-ls---only-prod -+-- chai@1.0.0 -+-- optional-dep@1.0.0 -\`-- prod-dep@1.0.0 - \`-- dog@2.0.0 - -` - exports[`test/lib/commands/ls.js TAP ls --parseable --depth=0 > should output tree containing only top-level dependencies 1`] = ` {CWD}/tap-testdir-ls-ls---parseable---depth-0 {CWD}/tap-testdir-ls-ls---parseable---depth-0/node_modules/chai @@ -204,21 +187,6 @@ exports[`test/lib/commands/ls.js TAP ls --parseable --long with extraneous deps {CWD}/tap-testdir-ls-ls---parseable---long-with-extraneous-deps/node_modules/dog:dog@1.0.0 ` -exports[`test/lib/commands/ls.js TAP ls --parseable --only=development > should output tree containing only development deps 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---only-development -{CWD}/tap-testdir-ls-ls---parseable---only-development/node_modules/dev-dep -{CWD}/tap-testdir-ls-ls---parseable---only-development/node_modules/foo -{CWD}/tap-testdir-ls-ls---parseable---only-development/node_modules/dog -` - -exports[`test/lib/commands/ls.js TAP ls --parseable --only=prod > should output tree containing only prod deps 1`] = ` -{CWD}/tap-testdir-ls-ls---parseable---only-prod -{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/chai -{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/optional-dep -{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/prod-dep -{CWD}/tap-testdir-ls-ls---parseable---only-prod/node_modules/prod-dep/node_modules/dog -` - exports[`test/lib/commands/ls.js TAP ls --parseable --production > should output tree containing production deps 1`] = ` {CWD}/tap-testdir-ls-ls---parseable---production {CWD}/tap-testdir-ls-ls---parseable---production/node_modules/chai diff --git a/deps/npm/tap-snapshots/test/lib/commands/owner.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/owner.js.test.cjs deleted file mode 100644 index f3d7335e473074..00000000000000 --- a/deps/npm/tap-snapshots/test/lib/commands/owner.js.test.cjs +++ /dev/null @@ -1,20 +0,0 @@ -/* IMPORTANT - * This snapshot file is 
auto-generated, but designed for humans. - * It should be checked into source control and tracked carefully. - * Re-generate by setting TAP_SNAPSHOT=1 and running tests. - * Make sure to inspect the output below. Do not ignore changes! - */ -'use strict' -exports[`test/lib/commands/owner.js TAP owner ls > should output owners of 1`] = ` -nlf -ruyadorno -darcyclarke -isaacs -` - -exports[`test/lib/commands/owner.js TAP owner ls no args > should output owners of cwd package 1`] = ` -nlf -ruyadorno -darcyclarke -isaacs -` diff --git a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs index dec7727834fa6f..e2d248edf5b6c6 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs @@ -5,153 +5,263 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' -exports[`test/lib/commands/publish.js TAP private workspaces colorless > should output all publishes 1`] = ` -Array [ - "+ @npmcli/b@1.0.0", -] +exports[`test/lib/commands/publish.js TAP _auth config default registry > new package version 1`] = ` ++ test-package@1.0.0 +` + +exports[`test/lib/commands/publish.js TAP bare _auth and registry config > new package version 1`] = ` ++ @npm/test-package@1.0.0 ` -exports[`test/lib/commands/publish.js TAP private workspaces colorless > should publish all non-private workspaces 1`] = ` +exports[`test/lib/commands/publish.js TAP dry-run > must match snapshot 1`] = ` Array [ - Object { - "_id": "@npmcli/b@1.0.0", - "name": "@npmcli/b", - "readme": "ERROR: No README data found!", - "version": "1.0.0", - }, + Array [ + "", + ], + Array [ + "", + "package: test-package@1.0.0", + ], + Array [ + "=== Tarball Contents ===", + ], + Array [ + "", + "87B package.json", + ], + Array [ + "=== Tarball Details ===", + ], + Array [ + "", + String( + name: test-package + version: 1.0.0 + filename: test-package-1.0.0.tgz + package size: 160 B + unpacked size: 87 B + shasum:{sha} + integrity:{sha} + total files: 1 + ), + ], + Array [ + "", + "", + ], + Array [ + "", + "Publishing to https://registry.npmjs.org/ (dry-run)", + ], ] ` -exports[`test/lib/commands/publish.js TAP private workspaces with color > should output all publishes 1`] = ` +exports[`test/lib/commands/publish.js TAP has auth for scope configured registry > new package version 1`] = ` ++ @npm/test-package@1.0.0 +` + +exports[`test/lib/commands/publish.js TAP ignore-scripts > new package version 1`] = ` ++ test-package@1.0.0 +` + +exports[`test/lib/commands/publish.js TAP json > must match snapshot 1`] = ` Array [ - "+ @npmcli/b@1.0.0", + Array [ + "", + "Publishing to https://registry.npmjs.org/", + ], ] ` -exports[`test/lib/commands/publish.js TAP private workspaces with color > should publish all non-private workspaces 1`] = ` +exports[`test/lib/commands/publish.js TAP json > new package json 1`] = ` +{ + "id": "test-package@1.0.0", + "name": "test-package", + "version": "1.0.0", + "size": 160, + "unpackedSize": 87, + "shasum": "{sha}", + "integrity": "{sha}", + "filename": "test-package-1.0.0.tgz", + "files": [ + { + "path": "package.json", + "size": 87, + "mode": 420 + } + ], + "entryCount": 1, + "bundled": [] +} +` + +exports[`test/lib/commands/publish.js TAP no auth dry-run > must match snapshot 1`] = ` ++ test-package@1.0.0 +` + +exports[`test/lib/commands/publish.js TAP no auth dry-run > warns about auth being needed 1`] = ` Array [ - Object { - "_id": "@npmcli/b@1.0.0", - 
"name": "@npmcli/b", - "readme": "ERROR: No README data found!", - "version": "1.0.0", - }, + Array [ + "", + "This command requires you to be logged in to https://registry.npmjs.org/ (dry-run)", + ], ] ` -exports[`test/lib/commands/publish.js TAP workspaces all workspaces > should output all publishes 1`] = ` +exports[`test/lib/commands/publish.js TAP re-loads publishConfig.registry if added during script process > new package version 1`] = ` ++ test-package@1.0.0 +` + +exports[`test/lib/commands/publish.js TAP respects publishConfig.registry, runs appropriate scripts > new package version 1`] = ` + +` + +exports[`test/lib/commands/publish.js TAP scoped _auth config scoped registry > new package version 1`] = ` ++ @npm/test-package@1.0.0 +` + +exports[`test/lib/commands/publish.js TAP tarball > must match snapshot 1`] = ` Array [ - "+ workspace-a@1.2.3-a", - "+ workspace-b@1.2.3-n", - "+ workspace-n@1.2.3-n", + Array [ + "", + ], + Array [ + "", + "package: test-tar-package@1.0.0", + ], + Array [ + "=== Tarball Contents ===", + ], + Array [ + "", + String( + 26B index.js + 98B package.json + ), + ], + Array [ + "=== Tarball Details ===", + ], + Array [ + "", + String( + name: test-tar-package + version: 1.0.0 + filename: test-tar-package-1.0.0.tgz + package size: 218 B + unpacked size: 124 B + shasum:{sha} + integrity:{sha} + total files: 2 + ), + ], + Array [ + "", + "", + ], + Array [ + "", + "Publishing to https://registry.npmjs.org/", + ], ] ` -exports[`test/lib/commands/publish.js TAP workspaces all workspaces > should publish all workspaces 1`] = ` +exports[`test/lib/commands/publish.js TAP tarball > new package json 1`] = ` ++ test-tar-package@1.0.0 +` + +exports[`test/lib/commands/publish.js TAP workspaces all workspaces - color > all public workspaces 1`] = ` ++ workspace-a@1.2.3-a ++ workspace-b@1.2.3-n ++ workspace-n@1.2.3-n +` + +exports[`test/lib/commands/publish.js TAP workspaces all workspaces - color > warns about skipped private workspace in color 1`] = ` Array [ - Object { - "_id": "workspace-a@1.2.3-a", - "name": "workspace-a", - "readme": "ERROR: No README data found!", - "repository": Object { - "type": "git", - "url": "http://repo.workspace-a/", - }, - "version": "1.2.3-a", - }, - Object { - "_id": "workspace-b@1.2.3-n", - "bugs": Object { - "url": "https://github.com/npm/workspace-b/issues", - }, - "homepage": "https://github.com/npm/workspace-b#readme", - "name": "workspace-b", - "readme": "ERROR: No README data found!", - "repository": Object { - "type": "git", - "url": "git+https://github.com/npm/workspace-b.git", - }, - "version": "1.2.3-n", - }, - Object { - "_id": "workspace-n@1.2.3-n", - "name": "workspace-n", - "readme": "ERROR: No README data found!", - "version": "1.2.3-n", - }, + Array [ + "publish", + "Skipping workspace \\u001b[32mworkspace-p\\u001b[39m, marked as \\u001b[1mprivate\\u001b[22m", + ], ] ` -exports[`test/lib/commands/publish.js TAP workspaces json > should output all publishes as json 1`] = ` +exports[`test/lib/commands/publish.js TAP workspaces all workspaces - no color > all public workspaces 1`] = ` ++ workspace-a@1.2.3-a ++ workspace-b@1.2.3-n ++ workspace-n@1.2.3-n +` + +exports[`test/lib/commands/publish.js TAP workspaces all workspaces - no color > warns about skipped private workspace 1`] = ` Array [ - String( - { - "workspace-a": { - "id": "workspace-a@1.2.3-a" - }, - "workspace-b": { - "id": "workspace-b@1.2.3-n" - }, - "workspace-n": { - "id": "workspace-n@1.2.3-n" - } - } - ), + Array [ + "publish", + "Skipping workspace 
workspace-p, marked as private", + ], ] ` -exports[`test/lib/commands/publish.js TAP workspaces json > should publish all workspaces 1`] = ` -Array [ - Object { - "_id": "workspace-a@1.2.3-a", +exports[`test/lib/commands/publish.js TAP workspaces json > all workspaces in json 1`] = ` +{ + "workspace-a": { + "id": "workspace-a@1.2.3-a", "name": "workspace-a", - "readme": "ERROR: No README data found!", - "repository": Object { - "type": "git", - "url": "http://repo.workspace-a/", - }, "version": "1.2.3-a", + "size": 162, + "unpackedSize": 82, + "shasum": "{sha}", + "integrity": "{sha}", + "filename": "workspace-a-1.2.3-a.tgz", + "files": [ + { + "path": "package.json", + "size": 82, + "mode": 420 + } + ], + "entryCount": 1, + "bundled": [] }, - Object { - "_id": "workspace-b@1.2.3-n", - "bugs": Object { - "url": "https://github.com/npm/workspace-b/issues", - }, - "homepage": "https://github.com/npm/workspace-b#readme", + "workspace-b": { + "id": "workspace-b@1.2.3-n", "name": "workspace-b", - "readme": "ERROR: No README data found!", - "repository": Object { - "type": "git", - "url": "git+https://github.com/npm/workspace-b.git", - }, "version": "1.2.3-n", + "size": 171, + "unpackedSize": 92, + "shasum": "{sha}", + "integrity": "{sha}", + "filename": "workspace-b-1.2.3-n.tgz", + "files": [ + { + "path": "package.json", + "size": 92, + "mode": 420 + } + ], + "entryCount": 1, + "bundled": [] }, - Object { - "_id": "workspace-n@1.2.3-n", + "workspace-n": { + "id": "workspace-n@1.2.3-n", "name": "workspace-n", - "readme": "ERROR: No README data found!", "version": "1.2.3-n", - }, -] -` - -exports[`test/lib/commands/publish.js TAP workspaces one workspace > should output one publish 1`] = ` -Array [ - "+ workspace-a@1.2.3-a", -] + "size": 140, + "unpackedSize": 42, + "shasum": "{sha}", + "integrity": "{sha}", + "filename": "workspace-n-1.2.3-n.tgz", + "files": [ + { + "path": "package.json", + "size": 42, + "mode": 420 + } + ], + "entryCount": 1, + "bundled": [] + } +} ` -exports[`test/lib/commands/publish.js TAP workspaces one workspace > should publish given workspace 1`] = ` -Array [ - Object { - "_id": "workspace-a@1.2.3-a", - "name": "workspace-a", - "readme": "ERROR: No README data found!", - "repository": Object { - "type": "git", - "url": "http://repo.workspace-a/", - }, - "version": "1.2.3-a", - }, -] +exports[`test/lib/commands/publish.js TAP workspaces one workspace - success > single workspace 1`] = ` ++ workspace-a@1.2.3-a ` diff --git a/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs b/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs index cd8b0592c36e8e..37349cbe01e7d2 100644 --- a/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/load-all-commands.js.test.cjs @@ -51,7 +51,7 @@ Options: [--omit [--omit ...]] [--foreground-scripts] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] Run "npm help audit" for more info ` @@ -164,7 +164,7 @@ Options: [--omit [--omit ...]] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] alias: ddp @@ -314,7 +314,7 @@ Options: [--omit [--omit ...]] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] 
+[-ws|--workspaces] [--include-workspace-root] [--install-links] Run "npm help find-dupes" for more info ` @@ -423,7 +423,7 @@ Options: [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: add, i, in, ins, inst, insta, instal, isnt, isnta, isntal, isntall @@ -467,7 +467,7 @@ Options: [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] alias: it @@ -488,7 +488,7 @@ Options: [--omit [--omit ...]] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] alias: ln @@ -506,7 +506,7 @@ Options: [--omit [--omit ...]] [--link] [--package-lock-only] [--unicode] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] alias: la @@ -550,7 +550,7 @@ Options: [--omit [--omit ...]] [--link] [--package-lock-only] [--unicode] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] alias: list @@ -683,7 +683,7 @@ Options: [--omit [--omit ...]] [--dry-run] [--json] [--foreground-scripts] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] Run "npm help prune" for more info ` @@ -711,7 +711,7 @@ npm rebuild [[<@scope>/][@] ...] Options: [-g|--global] [--no-bin-links] [--foreground-scripts] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] alias: rb @@ -921,7 +921,7 @@ npm uninstall [<@scope>/]... 
Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: unlink, remove, rm, r, un @@ -967,7 +967,7 @@ Options: [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] -[-ws|--workspaces] [--include-workspace-root] +[-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: up, upgrade, udpate diff --git a/deps/npm/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs index 91f0d782878a3f..ff00f9a0f9b3dd 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs @@ -76,6 +76,7 @@ Array [ "init.license", "init.module", "init.version", + "install-links", "json", "key", "legacy-bundling", @@ -168,6 +169,8 @@ exports[`test/lib/utils/config/definitions.js TAP > config description for _auth * Type: null or String A basic-auth string to use when authenticating against the npm registry. +This will ONLY be used to authenticate against the npm registry. For other +registries you will need to scope it like "//other-registry.tld/:_auth" Warning: This should generally not be set via a command-line option. It is safer to use a registry-provided authentication bearer token stored in the @@ -973,6 +976,17 @@ exports[`test/lib/utils/config/definitions.js TAP > config description for init. Alias for \`--init-version\` ` +exports[`test/lib/utils/config/definitions.js TAP > config description for install-links 1`] = ` +#### \`install-links\` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. +` + exports[`test/lib/utils/config/definitions.js TAP > config description for json 1`] = ` #### \`json\` diff --git a/deps/npm/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs index 7b13c34daf6821..6740b94c772c8a 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs @@ -12,6 +12,8 @@ exports[`test/lib/utils/config/describe-all.js TAP > must match snapshot 1`] = ` * Type: null or String A basic-auth string to use when authenticating against the npm registry. +This will ONLY be used to authenticate against the npm registry. For other +registries you will need to scope it like "//other-registry.tld/:_auth" Warning: This should generally not be set via a command-line option. It is safer to use a registry-provided authentication bearer token stored in the @@ -765,6 +767,18 @@ number, if not already set in package.json. +#### \`install-links\` + +* Default: false +* Type: Boolean + +When set file: protocol dependencies that exist outside of the project root +will be packed and installed as regular dependencies instead of creating a +symlink. This option has no effect on workspaces. 
+ + + + #### \`json\` * Default: false diff --git a/deps/npm/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs index 244469f83cd9e3..941def42ee1833 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs @@ -206,7 +206,7 @@ All commands: [--omit [--omit ...]] [--foreground-scripts] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] Run "npm help audit" for more info @@ -296,7 +296,7 @@ All commands: [--omit [--omit ...]] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] alias: ddp @@ -426,7 +426,7 @@ All commands: [--omit [--omit ...]] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] Run "npm help find-dupes" for more info @@ -511,7 +511,7 @@ All commands: [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: add, i, in, ins, inst, insta, instal, isnt, isnta, isntal, isntall @@ -551,7 +551,7 @@ All commands: [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] alias: it @@ -570,7 +570,7 @@ All commands: [--omit [--omit ...]] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] alias: ln @@ -586,7 +586,7 @@ All commands: [--omit [--omit ...]] [--link] [--package-lock-only] [--unicode] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] alias: la @@ -624,7 +624,7 @@ All commands: [--omit [--omit ...]] [--link] [--package-lock-only] [--unicode] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] alias: list @@ -739,7 +739,7 @@ All commands: [--omit [--omit ...]] [--dry-run] [--json] [--foreground-scripts] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] Run "npm help prune" for more info @@ -763,7 +763,7 @@ All commands: Options: [-g|--global] [--no-bin-links] [--foreground-scripts] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] alias: rb @@ -941,7 +941,7 @@ All commands: Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle] [-w|--workspace [-w|--workspace ...]] - 
[-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: unlink, remove, rm, r, un @@ -981,7 +981,7 @@ All commands: [--strict-peer-deps] [--no-package-lock] [--foreground-scripts] [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] - [-ws|--workspaces] [--include-workspace-root] + [-ws|--workspaces] [--include-workspace-root] [--install-links] aliases: up, upgrade, udpate diff --git a/deps/npm/test/fixtures/mock-npm.js b/deps/npm/test/fixtures/mock-npm.js index b6742a425aa9a0..4263dc8fbedc3f 100644 --- a/deps/npm/test/fixtures/mock-npm.js +++ b/deps/npm/test/fixtures/mock-npm.js @@ -118,7 +118,7 @@ const LoadMockNpm = async (t, { mockGlobals(t, { 'process.env.HOME': home, 'process.env.npm_config_cache': cache, - ...(globals ? result(globals, { prefix, cache }) : {}), + ...(globals ? result(globals, { prefix, cache, home }) : {}), // Some configs don't work because they can't be set via npm.config.set until // config is loaded. But some config items are needed before that. So this is // an explicit set of configs that must be loaded as env vars. diff --git a/deps/npm/test/fixtures/mock-registry.js b/deps/npm/test/fixtures/mock-registry.js index 6b6722fcbbc3c3..5890fa7ee93661 100644 --- a/deps/npm/test/fixtures/mock-registry.js +++ b/deps/npm/test/fixtures/mock-registry.js @@ -5,11 +5,13 @@ * for tests against any registry data. */ const pacote = require('pacote') +const npa = require('npm-package-arg') class MockRegistry { #tap #nock #registry #authorization + #basic constructor (opts) { if (!opts.registry) { @@ -17,6 +19,7 @@ class MockRegistry { } this.#registry = (new URL(opts.registry)).origin this.#authorization = opts.authorization + this.#basic = opts.basic // Required for this.package this.#tap = opts.tap } @@ -31,6 +34,9 @@ class MockRegistry { if (this.#authorization) { reqheaders.authorization = `Bearer ${this.#authorization}` } + if (this.#basic) { + reqheaders.authorization = `Basic ${this.#basic}` + } this.#nock = tnock(this.#tap, this.#registry, { reqheaders }) } return this.#nock @@ -40,8 +46,12 @@ class MockRegistry { this.#nock = nock } - whoami ({ username }) { - this.nock = this.nock.get('/-/whoami').reply(200, { username }) + whoami ({ username, body, responseCode = 200, times = 1 }) { + if (username) { + this.nock = this.nock.get('/-/whoami').times(times).reply(responseCode, { username }) + } else { + this.nock = this.nock.get('/-/whoami').times(times).reply(responseCode, body) + } } access ({ spec, access, publishRequires2fa }) { @@ -80,6 +90,16 @@ class MockRegistry { ).reply(200) } + couchuser ({ username, body, responseCode = 200 }) { + if (body) { + this.nock = this.nock.get(`/-/user/org.couchdb.user:${encodeURIComponent(username)}`) + .reply(responseCode, body) + } else { + this.nock = this.nock.get(`/-/user/org.couchdb.user:${encodeURIComponent(username)}`) + .reply(responseCode, { _id: `org.couchdb.user:${username}`, email: '', name: username }) + } + } + couchlogin ({ username, password, email, otp, token = 'npm_default-test-token' }) { this.nock = this.nock .post('/-/v1/login').reply(401, { error: 'You must be logged in to publish packages.' 
}) @@ -108,7 +128,7 @@ class MockRegistry { } // team can be a team or a username - lsPackages ({ team, packages = {} }) { + lsPackages ({ team, packages = {}, times = 1 }) { if (team.startsWith('@')) { team = team.slice(1) } @@ -119,7 +139,7 @@ class MockRegistry { } else { uri = `/-/org/${encodeURIComponent(scope)}/package` } - this.nock = this.nock.get(uri).query({ format: 'cli' }).reply(200, packages) + this.nock = this.nock.get(uri).query({ format: 'cli' }).times(times).reply(200, packages) } lsCollaborators ({ spec, user, collaborators = {} }) { @@ -152,7 +172,8 @@ class MockRegistry { async package ({ manifest, times = 1, query, tarballs }) { let nock = this.nock - nock = nock.get(`/${manifest.name}`).times(times) + const spec = npa(manifest.name) + nock = nock.get(`/${spec.escapedName}`).times(times) if (query) { nock = nock.query(query) } @@ -169,8 +190,10 @@ class MockRegistry { this.nock = nock } - // the last packument in the packuments array will be tagged as latest - manifest ({ name = 'test-package', packuments } = {}) { + // either pass in packuments if you need to set specific attributes besides version, + // or an array of versions + // the last packument in the packuments or versions array will be tagged latest + manifest ({ name = 'test-package', packuments, versions } = {}) { packuments = this.packuments(packuments, name) const latest = packuments.slice(-1)[0] const manifest = { @@ -184,6 +207,9 @@ class MockRegistry { 'dist-tags': { latest: latest.version }, ...latest, } + if (versions) { + packuments = versions.map(version => ({ version })) + } for (const packument of packuments) { manifest.versions[packument.version] = { @@ -194,6 +220,7 @@ class MockRegistry { dist: { tarball: `${this.#registry}/${name}/-/${name}-${packument.version}.tgz`, }, + maintainers: [], ...packument, } manifest.time[packument.version] = new Date() diff --git a/deps/npm/test/lib/commands/audit.js b/deps/npm/test/lib/commands/audit.js index 1afb8d333b7cea..da6de4774e6b8d 100644 --- a/deps/npm/test/lib/commands/audit.js +++ b/deps/npm/test/lib/commands/audit.js @@ -2,9 +2,9 @@ const t = require('tap') const { load: loadMockNpm } = require('../../fixtures/mock-npm') const MockRegistry = require('../../fixtures/mock-registry.js') -const util = require('util') const zlib = require('zlib') -const gzip = util.promisify(zlib.gzip) +const gzip = zlib.gzipSync +const gunzip = zlib.gunzipSync const path = require('path') const fs = require('fs') @@ -43,7 +43,14 @@ const tree = { }, }, }), - 'test-dep-a': { + 'test-dep-a-vuln': { + 'package.json': JSON.stringify({ + name: 'test-dep-a', + version: '1.0.0', + }), + 'vulnerable.txt': 'vulnerable test-dep-a', + }, + 'test-dep-a-fixed': { 'package.json': JSON.stringify({ name: 'test-dep-a', version: '1.0.1', @@ -66,8 +73,11 @@ t.test('normal audit', async t => { packuments: [{ version: '1.0.0' }, { version: '1.0.1' }], }) await registry.package({ manifest }) - const advisory = registry.advisory({ id: 100 }) - const bulkBody = await gzip(JSON.stringify({ 'test-dep-a': ['1.0.0'] })) + const advisory = registry.advisory({ + id: 100, + vulnerable_versions: '<1.0.1', + }) + const bulkBody = gzip(JSON.stringify({ 'test-dep-a': ['1.0.0'] })) registry.nock.post('/-/npm/v1/security/advisories/bulk', bulkBody) .reply(200, { 'test-dep-a': [advisory], @@ -79,6 +89,55 @@ t.test('normal audit', async t => { t.matchSnapshot(joinedOutput()) }) +t.test('fallback audit ', async t => { + const { npm, joinedOutput } = await loadMockNpm(t, { + prefixDir: tree, + }) + const 
registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: 'test-dep-a', + packuments: [{ version: '1.0.0' }, { version: '1.0.1' }], + }) + await registry.package({ manifest }) + const advisory = registry.advisory({ + id: 100, + module_name: 'test-dep-a', + vulnerable_versions: '<1.0.1', + findings: [{ version: '1.0.0', paths: ['test-dep-a'] }], + }) + registry.nock + .post('/-/npm/v1/security/advisories/bulk').reply(404) + .post('/-/npm/v1/security/audits/quick', body => { + const unzipped = JSON.parse(gunzip(Buffer.from(body, 'hex'))) + return t.match(unzipped, { + name: 'test-dep', + version: '1.0.0', + requires: { 'test-dep-a': '*' }, + dependencies: { 'test-dep-a': { version: '1.0.0' } }, + }) + }).reply(200, { + actions: [], + muted: [], + advisories: { + 100: advisory, + }, + metadata: { + vulnerabilities: { info: 0, low: 0, moderate: 0, high: 1, critical: 0 }, + dependencies: 1, + devDependencies: 0, + optionalDependencies: 0, + totalDependencies: 1, + }, + }) + await npm.exec('audit', []) + t.ok(process.exitCode, 'would have exited uncleanly') + process.exitCode = 0 + t.matchSnapshot(joinedOutput()) +}) + t.test('json audit', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: tree, @@ -97,7 +156,7 @@ t.test('json audit', async t => { }) await registry.package({ manifest }) const advisory = registry.advisory({ id: 100 }) - const bulkBody = await gzip(JSON.stringify({ 'test-dep-a': ['1.0.0'] })) + const bulkBody = gzip(JSON.stringify({ 'test-dep-a': ['1.0.0'] })) registry.nock.post('/-/npm/v1/security/advisories/bulk', bulkBody) .reply(200, { 'test-dep-a': [advisory], @@ -109,7 +168,7 @@ t.test('json audit', async t => { t.matchSnapshot(joinedOutput()) }) -t.test('audit fix', async t => { +t.test('audit fix - bulk endpoint', async t => { const { npm, joinedOutput } = await loadMockNpm(t, { prefixDir: tree, }) @@ -124,20 +183,23 @@ t.test('audit fix', async t => { await registry.package({ manifest, tarballs: { - '1.0.1': path.join(npm.prefix, 'test-dep-a'), + '1.0.1': path.join(npm.prefix, 'test-dep-a-fixed'), }, }) const advisory = registry.advisory({ id: 100, vulnerable_versions: '1.0.0' }) - // Can't validate this request body because it changes with each node - // version/npm version and nock's body validation is not async, while - // zlib.gunzip is - registry.nock.post('/-/npm/v1/security/advisories/bulk') + registry.nock.post('/-/npm/v1/security/advisories/bulk', body => { + const unzipped = JSON.parse(gunzip(Buffer.from(body, 'hex'))) + return t.same(unzipped, { 'test-dep-a': ['1.0.0'] }) + }) .reply(200, { // first audit 'test-dep-a': [advisory], }) - .post('/-/npm/v1/security/advisories/bulk') + .post('/-/npm/v1/security/advisories/bulk', body => { + const unzipped = JSON.parse(gunzip(Buffer.from(body, 'hex'))) + return t.same(unzipped, { 'test-dep-a': ['1.0.1'] }) + }) .reply(200, { // after fix - 'test-dep-a': [advisory], + 'test-dep-a': [], }) await npm.exec('audit', ['fix']) t.matchSnapshot(joinedOutput()) diff --git a/deps/npm/test/lib/commands/deprecate.js b/deps/npm/test/lib/commands/deprecate.js index 37a407c3b6a1a1..03177cb7be0b9d 100644 --- a/deps/npm/test/lib/commands/deprecate.js +++ b/deps/npm/test/lib/commands/deprecate.js @@ -1,137 +1,152 @@ const t = require('tap') +const { load: loadMockNpm } = require('../../fixtures/mock-npm') -let getIdentityImpl = () => 'someperson' -let npmFetchBody = null +const MockRegistry = require('../../fixtures/mock-registry.js') 
-const npmFetch = async (uri, opts) => { - npmFetchBody = opts.body -} +const user = 'test-user' +const token = 'test-auth-token' +const auth = { '//registry.npmjs.org/:_authToken': token } +const versions = ['1.0.0', '1.0.1', '1.0.1-pre'] -npmFetch.json = async (uri, opts) => { - return { - versions: { - '1.0.0': {}, - '1.0.1': {}, - '1.0.1-pre': {}, - }, - } -} - -const Deprecate = t.mock('../../../lib/commands/deprecate.js', { - '../../../lib/utils/get-identity.js': async () => getIdentityImpl(), - libnpmaccess: { - lsPackages: async () => ({ foo: 'write', bar: 'write', baz: 'write', buzz: 'read' }), - }, - 'npm-registry-fetch': npmFetch, -}) - -const deprecate = new Deprecate({ - flatOptions: { registry: 'https://registry.npmjs.org' }, -}) +// libnpmaccess maps these to read-write and read-only +const packages = { foo: 'write', bar: 'write', baz: 'write', buzz: 'read' } t.test('completion', async t => { - const defaultIdentityImpl = getIdentityImpl - t.teardown(() => { - getIdentityImpl = defaultIdentityImpl + const { npm } = await loadMockNpm(t, { + config: { + ...auth, + }, }) + const deprecate = await npm.cmd('deprecate') const testComp = async (argv, expect) => { const res = await deprecate.completion({ conf: { argv: { remain: argv } } }) t.strictSame(res, expect, `completion: ${argv}`) } + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + + registry.whoami({ username: user, times: 4 }) + registry.lsPackages({ team: user, packages, times: 4 }) await Promise.all([ testComp([], ['foo', 'bar', 'baz']), testComp(['b'], ['bar', 'baz']), testComp(['fo'], ['foo']), testComp(['g'], []), - testComp(['foo', 'something'], []), ]) - getIdentityImpl = () => { - throw new Error('deprecate test failure') - } + await testComp(['foo', 'something'], []) + + registry.whoami({ statusCode: 404, body: {} }) - t.rejects(testComp([], []), { message: 'deprecate test failure' }) + t.rejects(testComp([], []), { code: 'ENEEDAUTH' }) }) t.test('no args', async t => { + const { npm } = await loadMockNpm(t) await t.rejects( - deprecate.exec([]), - /Usage:/, + npm.exec('deprecate', []), + { code: 'EUSAGE' }, 'logs usage' ) }) t.test('only one arg', async t => { + const { npm } = await loadMockNpm(t) await t.rejects( - deprecate.exec(['foo']), - /Usage:/, + npm.exec('deprecate', ['foo']), + { code: 'EUSAGE' }, 'logs usage' ) }) t.test('invalid semver range', async t => { + const { npm } = await loadMockNpm(t) await t.rejects( - deprecate.exec(['foo@notaversion', 'this will fail']), + npm.exec('deprecate', ['foo@notaversion', 'this will fail']), /invalid version range/, 'logs semver error' ) }) t.test('undeprecate', async t => { - t.teardown(() => { - npmFetchBody = null + const { npm, joinedOutput } = await loadMockNpm(t, { config: { ...auth } }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, }) - await deprecate.exec(['foo', '']) - t.match(npmFetchBody, { - versions: { - '1.0.0': { deprecated: '' }, - '1.0.1': { deprecated: '' }, - '1.0.1-pre': { deprecated: '' }, - }, - }, 'undeprecates everything') + const manifest = registry.manifest({ + name: 'foo', + versions, + }) + registry.package({ manifest, query: { write: true } }) + registry.nock.put('/foo', body => { + for (const version of versions) { + if (body.versions[version].deprecated !== '') { + return false + } + } + return true + }).reply(200, {}) + + await npm.exec('deprecate', ['foo', '']) + t.match(joinedOutput(), '') }) 
t.test('deprecates given range', async t => { - t.teardown(() => { - npmFetchBody = null + const { npm, joinedOutput } = await loadMockNpm(t, { config: { ...auth } }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, }) - - await deprecate.exec(['foo@1.0.0', 'this version is deprecated']) - t.match(npmFetchBody, { - versions: { - '1.0.0': { - deprecated: 'this version is deprecated', - }, - '1.0.1': { - // the undefined here is necessary to ensure that we absolutely - // did not assign this property - deprecated: undefined, - }, - }, + const manifest = registry.manifest({ + name: 'foo', + versions, }) + registry.package({ manifest, query: { write: true } }) + const message = 'test deprecation message' + registry.nock.put('/foo', body => { + if (body.versions['1.0.1'].deprecated) { + return false + } + if (body.versions['1.0.1-pre'].deprecated) { + return false + } + return body.versions['1.0.0'].deprecated === message + }).reply(200, {}) + await npm.exec('deprecate', ['foo@1.0.0', message]) + t.match(joinedOutput(), '') }) t.test('deprecates all versions when no range is specified', async t => { - t.teardown(() => { - npmFetchBody = null + const { npm, joinedOutput } = await loadMockNpm(t, { config: { ...auth } }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, }) - - await deprecate.exec(['foo', 'this version is deprecated']) - - t.match(npmFetchBody, { - versions: { - '1.0.0': { - deprecated: 'this version is deprecated', - }, - '1.0.1': { - deprecated: 'this version is deprecated', - }, - '1.0.1-pre': { - deprecated: 'this version is deprecated', - }, - }, + const manifest = registry.manifest({ + name: 'foo', + versions, }) + registry.package({ manifest, query: { write: true } }) + const message = 'test deprecation message' + registry.nock.put('/foo', body => { + for (const version of versions) { + if (body.versions[version].deprecated !== message) { + return false + } + } + return true + }).reply(200, {}) + + await npm.exec('deprecate', ['foo', message]) + t.match(joinedOutput(), '') }) diff --git a/deps/npm/test/lib/commands/diff.js b/deps/npm/test/lib/commands/diff.js index c2b1a935da8749..0adaa6568d8f7a 100644 --- a/deps/npm/test/lib/commands/diff.js +++ b/deps/npm/test/lib/commands/diff.js @@ -417,7 +417,6 @@ t.test('single arg', t => { const Diff = t.mock('../../../lib/commands/diff.js', { ...mocks, - '../../../lib/utils/read-package-name.js': async () => 'my-project', pacote: { packument: spec => { t.equal(spec.name, 'lorem', 'should have expected spec name') @@ -455,7 +454,6 @@ t.test('single arg', t => { const Diff = t.mock('../../../lib/commands/diff.js', { ...mocks, - '../../../lib/utils/read-package-name.js': async () => 'my-project', '@npmcli/arborist': class { constructor () { throw new Error('ERR') diff --git a/deps/npm/test/lib/commands/exec.js b/deps/npm/test/lib/commands/exec.js index 1f7230d25b6544..d6e598d568d5b3 100644 --- a/deps/npm/test/lib/commands/exec.js +++ b/deps/npm/test/lib/commands/exec.js @@ -190,9 +190,14 @@ t.test('npx foo, bin already exists globally', async t => { t.test('npm exec foo, already present locally', async t => { const path = t.testdir() + const pkg = { name: 'foo', version: '1.2.3', bin: { foo: 'foo' } } npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, } MANIFESTS.foo 
= { name: 'foo', @@ -339,10 +344,18 @@ t.test('npm exec foo, not present locally or in central loc', async t => { const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -375,12 +388,21 @@ t.test('npm exec foo, not present locally or in central loc', async t => { t.test('npm exec foo, not present locally but in central loc', async t => { const path = t.testdir() const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') + const pkg = { name: 'foo', version: '1.2.3', bin: { foo: 'foo' } } npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -413,12 +435,21 @@ t.test('npm exec foo, not present locally but in central loc', async t => { t.test('npm exec foo, present locally but wrong version', async t => { const path = t.testdir() const installDir = resolve('npx-cache-dir/2badf4630f1cfaad') + const pkg = { name: 'foo', version: '1.2.3', bin: { foo: 'foo' } } npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, } MANIFESTS['foo@2.x'] = { name: 'foo', @@ -448,11 +479,63 @@ t.test('npm exec foo, present locally but wrong version', async t => { ]) }) +t.test('npm exec foo, present locally but outdated version', async t => { + const path = t.testdir() + const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') + const pkg = { name: 'foo', version: '1.2.3', bin: { foo: 'foo' } } + npm.localPrefix = path + ARB_ACTUAL_TREE[path] = { + inventory: { + query () { + return new Set() + }, + }, + } + ARB_ACTUAL_TREE[installDir] = { + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, + } + MANIFESTS.foo = { + name: 'foo', + version: '2.3.4', + bin: { + foo: 'foo', + }, + _from: 'foo@2.x', + } + await exec.exec(['foo', 'one arg', 'two arg']) + t.strictSame(MKDIRPS, [installDir], 'need to make install dir') + t.match(ARB_CTOR, [{ path }]) + t.match(ARB_REIFY, [{ add: ['foo'], legacyPeerDeps: false }], 'need to add foo@2.x') + t.equal(PROGRESS_ENABLED, true, 'progress re-enabled') + const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}` + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: process.cwd(), + stdioString: true, + event: 'npx', + env: { PATH }, + stdio: 'inherit', + }, + ]) +}) + t.test('npm exec --package=foo bar', async t => { const path = t.testdir() + const pkg = { name: 'foo', version: '1.2.3', bin: { foo: 'foo' } } npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -499,9 +582,18 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', async 
t => { }, }, }) + const pkg = { + name: '@foo/bar', + version: '1.2.3', + bin: { foo: 'foo', bar: 'bar' }, + } npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, } MANIFESTS['@foo/bar'] = foobarManifest await exec.exec(['@foo/bar', '--some=arg']) @@ -526,7 +618,7 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', async t => { t.test( 'npm exec @foo/bar, with same bin alias and no unscoped named bin, locally installed', async t => { - const foobarManifest = { + const pkg = { name: '@foo/bar', version: '1.2.3', bin: { @@ -538,15 +630,19 @@ t.test( const path = t.testdir({ node_modules: { '@foo/bar': { - 'package.json': JSON.stringify(foobarManifest), + 'package.json': JSON.stringify(pkg), }, }, }) npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, } - MANIFESTS['@foo/bar'] = foobarManifest + MANIFESTS['@foo/bar'] = pkg await exec.exec(['@foo/bar', 'one arg', 'two arg']) t.strictSame(MKDIRPS, [], 'no need to make any dirs') t.match(ARB_CTOR, [{ path }]) @@ -571,9 +667,22 @@ t.test( 'npm exec @foo/bar, with different bin alias and no unscoped named bin, locally installed', async t => { const path = t.testdir() + const pkg = { + name: '@foo/bar', + version: '1.2.3.', + bin: { foo: 'qux', corge: 'qux', baz: 'quux' }, + } npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ + ...pkg, + package: pkg, + pkgid: `${pkg.name}@${pkg.version}`, + }]) + }, + }, } MANIFESTS['@foo/bar'] = { name: '@foo/bar', @@ -609,10 +718,18 @@ t.test('run command with 2 packages, need install, verify sort', async t => { const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -653,10 +770,25 @@ t.test('run command with 2 packages, need install, verify sort', async t => { }) t.test('npm exec foo, no bin in package', async t => { - const path = t.testdir() + const pkg = { name: 'foo', version: '1.2.3' } + const path = t.testdir({ + node_modules: { + foo: { + 'package.json': JSON.stringify(pkg), + }, + }, + }) npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ + ...pkg, + package: pkg, + pkgid: `${pkg.name}@${pkg.version}`, + }]) + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -672,9 +804,22 @@ t.test('npm exec foo, no bin in package', async t => { t.test('npm exec foo, many bins in package, none named foo', async t => { const path = t.testdir() + const pkg = { + name: 'foo', + version: '1.2.3', + bin: { bar: 'bar', baz: 'baz' }, + } npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ + ...pkg, + package: pkg, + pkgid: `${pkg.name}@${pkg.version}`, + }]) + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -694,11 +839,16 @@ t.test('npm exec foo, many bins in package, none named foo', async t 
=> { t.test('npm exec -p foo -c "ls -laF"', async t => { const path = t.testdir() + const pkg = { name: 'foo', version: '1.2.3' } npm.localPrefix = path config.package = ['foo'] config.call = 'ls -laF' ARB_ACTUAL_TREE[path] = { - children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]), + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -751,10 +901,18 @@ t.test('prompt when installs are needed if not already present and shell is a TT const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -823,10 +981,18 @@ t.test( const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -896,10 +1062,18 @@ t.test( const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -957,10 +1131,18 @@ t.test('abort if prompt rejected', async t => { const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -1014,10 +1196,18 @@ t.test('abort if prompt false', async t => { const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -1070,10 +1260,18 @@ t.test('abort if -n provided', async t => { const installDir = resolve('npx-cache-dir/07de77790e5f40f2') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -1105,10 +1303,18 @@ t.test('forward legacyPeerDeps opt', async t => { const installDir = resolve('npx-cache-dir/f7fbba6e0636f890') npm.localPrefix = path ARB_ACTUAL_TREE[path] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } ARB_ACTUAL_TREE[installDir] = { - children: new Map(), + inventory: { + query () { + return new Set() + }, + }, } MANIFESTS.foo = { name: 'foo', @@ -1128,10 +1334,11 @@ t.test('forward legacyPeerDeps opt', async t => { ) }) -t.test('workspaces', t => { +t.test('workspaces', async t => { npm.localPrefix = t.testdir({ node_modules: { '.bin': { + a: '', foo: '', }, }, @@ -1158,69 +1365,125 @@ t.test('workspaces', t => { }), }) + const pkg = { name: 'foo', version: '1.2.3', bin: { 
foo: 'foo' } } PROGRESS_IGNORED = true - npm.localBin = resolve(npm.localPrefix, 'node_modules/.bin') + npm.localBin = resolve(npm.localPrefix, 'node_modules', '.bin') - t.test('with args, run scripts in the context of a workspace', async t => { - await exec.execWorkspaces(['foo', 'one arg', 'two arg'], ['a', 'b']) + // with arg matching existing bin, run scripts in the context of a workspace + await exec.execWorkspaces(['foo', 'one arg', 'two arg'], ['a', 'b']) - t.match(RUN_SCRIPTS, [ - { - pkg: { scripts: { npx: 'foo' } }, - args: ['one arg', 'two arg'], - banner: false, - path: process.cwd(), - stdioString: true, - event: 'npx', - env: { - PATH: [npm.localBin, process.env.PATH].join(delimiter), - }, - stdio: 'inherit', + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: npm.localPrefix, + stdioString: true, + event: 'npx', + env: { + PATH: [npm.localBin, process.env.PATH].join(delimiter), }, - ]) - }) + stdio: 'inherit', + }, + { + pkg: { scripts: { npx: 'foo' } }, + args: ['one arg', 'two arg'], + banner: false, + path: npm.localPrefix, + stdioString: true, + event: 'npx', + env: { + PATH: [npm.localBin, process.env.PATH].join(delimiter), + }, + stdio: 'inherit', + }, + ], 'should run with multiple args across multiple workspaces') - t.test('no args, spawn interactive shell', async t => { - CI_NAME = null - process.stdin.isTTY = true + // clean up + RUN_SCRIPTS.length = 0 - await exec.execWorkspaces([], ['a']) + // with packages, run scripts in the context of a workspace + config.package = ['foo'] + config.call = 'foo' + config.yes = false - t.strictSame(LOG_WARN, []) - t.strictSame( - npm._mockOutputs, + ARB_ACTUAL_TREE[npm.localPrefix] = { + inventory: { + query () { + return new Set([{ ...pkg, package: pkg }]) + }, + }, + } + + await exec.execWorkspaces([], ['a', 'b']) + + // path should point to the workspace folder + t.match(RUN_SCRIPTS, [ + { + pkg: { scripts: { npx: 'foo' } }, + args: [], + banner: false, + path: resolve(npm.localPrefix, 'packages', 'a'), + stdioString: true, + event: 'npx', + stdio: 'inherit', + }, + { + pkg: { scripts: { npx: 'foo' } }, + args: [], + banner: false, + path: resolve(npm.localPrefix, 'packages', 'b'), + stdioString: true, + event: 'npx', + stdio: 'inherit', + }, + ], 'should run without args in multiple workspaces') + + t.match(ARB_CTOR, [ + { path: npm.localPrefix }, + { path: npm.localPrefix }, + ]) + + // no args, spawn interactive shell + CI_NAME = null + config.package = [] + config.call = '' + process.stdin.isTTY = true + + await exec.execWorkspaces([], ['a']) + + t.strictSame(LOG_WARN, []) + t.strictSame( + npm._mockOutputs, + [ [ - [ - `\nEntering npm script environment in workspace a@1.0.0 at location:\n${resolve( - npm.localPrefix, - 'packages/a' - )}\nType 'exit' or ^D when finished\n`, - ], + `\nEntering npm script environment in workspace a@1.0.0 at location:\n${resolve( + npm.localPrefix, + 'packages/a' + )}\nType 'exit' or ^D when finished\n`, ], - 'printed message about interactive shell' - ) + ], + 'printed message about interactive shell' + ) - npm.color = true - flatOptions.color = true - npm._mockOutputs.length = 0 - await exec.execWorkspaces([], ['a']) + npm.color = true + flatOptions.color = true + npm._mockOutputs.length = 0 + await exec.execWorkspaces([], ['a']) - t.strictSame(LOG_WARN, []) - t.strictSame( - npm._mockOutputs, + t.strictSame(LOG_WARN, []) + t.strictSame( + npm._mockOutputs, + [ [ - [ + /* eslint-disable-next-line max-len */ + 
`\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m in workspace \u001b[32ma@1.0.0\u001b[39m at location:\u001b[0m\n\u001b[0m\u001b[2m${resolve( + npm.localPrefix, + 'packages/a' /* eslint-disable-next-line max-len */ - `\u001b[0m\u001b[0m\n\u001b[0mEntering npm script environment\u001b[0m\u001b[0m in workspace \u001b[32ma@1.0.0\u001b[39m at location:\u001b[0m\n\u001b[0m\u001b[2m${resolve( - npm.localPrefix, - 'packages/a' - /* eslint-disable-next-line max-len */ - )}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`, - ], + )}\u001b[22m\u001b[0m\u001b[1m\u001b[22m\n\u001b[1mType 'exit' or ^D when finished\u001b[22m\n\u001b[1m\u001b[22m`, ], - 'printed message about interactive shell' - ) - }) - - t.end() + ], + 'printed message about interactive shell' + ) }) diff --git a/deps/npm/test/lib/commands/install.js b/deps/npm/test/lib/commands/install.js index afb6adb4fb0a56..9b2d52f6edd218 100644 --- a/deps/npm/test/lib/commands/install.js +++ b/deps/npm/test/lib/commands/install.js @@ -139,6 +139,23 @@ t.test('should install globally using Arborist', async t => { t.strictSame(SCRIPTS, [], 'no scripts when installing globally') }) +t.test('should not install invalid global package name', async t => { + const { npm } = await loadMockNpm(t, { + '@npmcli/run-script': () => {}, + '../../lib/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function (args) { + throw new Error('should not reify') + }, + }) + npm.config.set('global', true) + npm.globalPrefix = path.resolve(t.testdir({})) + await t.rejects( + npm.exec('install', ['']), + /Usage:/, + 'should not install invalid package name' + ) +}) + t.test('npm i -g npm engines check success', async t => { const { npm } = await loadMockNpm(t, { '../../lib/utils/reify-finish.js': async () => {}, diff --git a/deps/npm/test/lib/commands/owner.js b/deps/npm/test/lib/commands/owner.js index eadfa2bf08b560..d80ce36fece983 100644 --- a/deps/npm/test/lib/commands/owner.js +++ b/deps/npm/test/lib/commands/owner.js @@ -1,760 +1,502 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm.js') +const { load: loadMockNpm } = require('../../fixtures/mock-npm.js') +const MockRegistry = require('../../fixtures/mock-registry.js') -let result = '' -let readPackageNamePrefix = null -let readPackageNameResponse = null +const npa = require('npm-package-arg') +const packageName = '@npmcli/test-package' +const spec = npa(packageName) +const auth = { '//registry.npmjs.org/:_authToken': 'test-auth-token' } -const noop = () => null +const maintainers = [ + { email: 'test-user-a@npmjs.org', name: 'test-user-a' }, + { email: 'test-user-b@npmjs.org', name: 'test-user-b' }, +] -const npm = mockNpm({ - output: (msg) => { - result = result ? 
`${result}\n${msg}` : msg - }, +t.test('owner no args', async t => { + const { npm } = await loadMockNpm(t) + await t.rejects( + npm.exec('owner', []), + { code: 'EUSAGE' }, + 'rejects with usage' + ) }) -const npmFetch = { json: noop } -const log = { error: noop, info: noop, verbose: noop } -const pacote = { packument: noop } - -const mocks = { - 'proc-log': log, - 'npm-registry-fetch': npmFetch, - pacote, - '../../../lib/utils/read-package-name.js': async (prefix) => { - readPackageNamePrefix = prefix - return readPackageNameResponse - }, -} - -const npmcliMaintainers = [ - { email: 'quitlahok@gmail.com', name: 'nlf' }, - { email: 'ruyadorno@hotmail.com', name: 'ruyadorno' }, - { email: 'darcy@darcyclarke.me', name: 'darcyclarke' }, - { email: 'i@izs.me', name: 'isaacs' }, -] - -const Owner = t.mock('../../../lib/commands/owner.js', mocks) -const owner = new Owner(npm) +t.test('owner ls no args', async t => { + const { npm, joinedOutput } = await loadMockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: packageName }), + }, + }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) -t.test('owner no args', async t => { - result = '' - t.teardown(() => { - result = '' + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], }) + registry.package({ manifest }) - await t.rejects( - owner.exec([]), - owner.usage) + await npm.exec('owner', ['ls']) + t.match(joinedOutput(), maintainers.map(m => `${m.name} <${m.email}>`).join('\n')) }) -t.test('owner ls no args', async t => { - t.plan(4) - - result = '' - - readPackageNameResponse = '@npmcli/map-workspaces' - pacote.packument = async (spec, opts) => { - t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') - t.match( - opts, - { - ...npm.flatOptions, - fullMetadata: true, - }, - 'should forward expected options to pacote.packument' - ) - return { maintainers: npmcliMaintainers } - } - t.teardown(() => { - npm.prefix = null - result = '' - pacote.packument = noop - readPackageNameResponse = null - }) - npm.prefix = 'test-npm-prefix' - - await owner.exec(['ls']) - t.matchSnapshot(result, 'should output owners of cwd package') - t.equal(readPackageNamePrefix, 'test-npm-prefix', 'read-package-name gets npm.prefix') +t.test('local package.json has no name', async t => { + const { npm } = await loadMockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ hello: 'world' }), + }, + }) + await t.rejects( + npm.exec('owner', ['ls']), + { code: 'EUSAGE' } + ) }) t.test('owner ls global', async t => { - t.teardown(() => { - npm.config.set('global', false) + const { npm } = await loadMockNpm(t, { + config: { global: true }, }) - npm.config.set('global', true) await t.rejects( - owner.exec(['ls']), - owner.usage + npm.exec('owner', ['ls']), + { code: 'EUSAGE' }, + 'rejects with usage' ) }) t.test('owner ls no args no cwd package', async t => { - result = '' - t.teardown(() => { - result = '' - log.error = noop - }) + const { npm } = await loadMockNpm(t) await t.rejects( - owner.exec(['ls']), - owner.usage + npm.exec('owner', ['ls']) ) }) t.test('owner ls fails to retrieve packument', async t => { - t.plan(4) - - result = '' - readPackageNameResponse = '@npmcli/map-workspaces' - pacote.packument = () => { - throw new Error('ERR') - } - log.error = (title, msg, pkgName) => { - t.equal(title, 'owner ls', 'should list npm owner ls title') - t.equal(msg, "Couldn't get owner data", 'should use expected msg') - t.equal(pkgName, 
'@npmcli/map-workspaces', 'should use pkg name') - } - t.teardown(() => { - result = '' - log.error = noop - pacote.packument = noop - }) - - await t.rejects( - owner.exec(['ls']), - /ERR/, - 'should throw unknown error' - ) + const { npm, logs } = await loadMockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: packageName }), + }, + }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + registry.nock.get(`/${spec.escapedName}`).reply(404) + await t.rejects(npm.exec('owner', ['ls'])) + t.match(logs.error, [['owner ls', "Couldn't get owner data", '@npmcli/test-package']]) }) t.test('owner ls ', async t => { - t.plan(3) - - result = '' - pacote.packument = async (spec, opts) => { - t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') - t.match( - opts, - { - ...npm.flatOptions, - fullMetadata: true, - }, - 'should forward expected options to pacote.packument' - ) - return { maintainers: npmcliMaintainers } - } - t.teardown(() => { - result = '' - pacote.packument = noop - }) - - await owner.exec(['ls', '@npmcli/map-workspaces']) - t.matchSnapshot(result, 'should output owners of ') + const { npm, joinedOutput } = await loadMockNpm(t) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.package({ manifest }) + + await npm.exec('owner', ['ls', packageName]) + t.match(joinedOutput(), maintainers.map(m => `${m.name} <${m.email}>`).join('\n')) }) t.test('owner ls no maintainers', async t => { - result = '' - pacote.packument = async (spec, opts) => { - return { maintainers: [] } - } - t.teardown(() => { - result = '' - pacote.packument = noop + const { npm, joinedOutput } = await loadMockNpm(t) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), }) + const manifest = registry.manifest({ + name: packageName, + versions: ['1.0.0'], + }) + registry.package({ manifest }) - await owner.exec(['ls', '@npmcli/map-workspaces']) - t.equal(result, 'no admin found', 'should output no admint found msg') + await npm.exec('owner', ['ls', packageName]) + t.equal(joinedOutput(), 'no admin found') }) t.test('owner add ', async t => { - t.plan(8) - - result = '' - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { - t.ok('should request user info') - t.match(opts, { ...npm.flatOptions }, 'should use expected opts') - return { - _id: 'org.couchdb.user:foo', - email: 'foo@github.com', - name: 'foo', - } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { - t.ok('should put changed owner') - t.match(opts, { - ...npm.flatOptions, - method: 'PUT', - body: { - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, - }, - spec: { - name: '@npmcli/map-workspaces', - }, - }, 'should use expected opts') - t.same( - opts.body.maintainers, - [ - ...npmcliMaintainers, - { - name: 'foo', - email: 'foo@github.com', - }, - ], - 'should contain expected new owners, adding requested user' - ) - return {} - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => { - t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') - t.match( - opts, - { - ...npm.flatOptions, - fullMetadata: true, - }, - 'should forward expected options to pacote.packument' - ) - return { - _rev: '1-foobaaa1', - 
maintainers: npmcliMaintainers, - } - } - t.teardown(() => { - result = '' - npmFetch.json = noop - pacote.packument = noop - }) - - await owner.exec(['add', 'foo', '@npmcli/map-workspaces']) - t.equal(result, '+ foo (@npmcli/map-workspaces)', 'should output add result') + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { ...auth }, + }) + const username = 'foo' + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) + registry.nock.put(`/${spec.escapedName}/-rev/${manifest._rev}`, body => { + t.match(body, { + _id: manifest._id, + _rev: manifest._rev, + maintainers: [ + ...manifest.maintainers, + { name: username, email: '' }, + ], + }) + return true + }).reply(200, {}) + await npm.exec('owner', ['add', username, packageName]) + t.equal(joinedOutput(), `+ ${username} (${packageName})`) }) t.test('owner add cwd package', async t => { - result = '' - readPackageNameResponse = '@npmcli/map-workspaces' - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { - return { - _id: 'org.couchdb.user:foo', - email: 'foo@github.com', - name: 'foo', - } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { - return {} - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => ({ - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, - }) - t.teardown(() => { - result = '' - readPackageNameResponse = null - npmFetch.json = noop - pacote.packument = noop - }) - - await owner.exec(['add', 'foo']) - t.equal(result, '+ foo (@npmcli/map-workspaces)', 'should output add result') + const { npm, joinedOutput } = await loadMockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: packageName }), + }, + config: { ...auth }, + }) + const username = 'foo' + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) + registry.nock.put(`/${spec.escapedName}/-rev/${manifest._rev}`, body => { + t.match(body, { + _id: manifest._id, + _rev: manifest._rev, + maintainers: [ + ...manifest.maintainers, + { name: username, email: '' }, + ], + }) + return true + }).reply(200, {}) + await npm.exec('owner', ['add', username]) + t.equal(joinedOutput(), `+ ${username} (${packageName})`) }) t.test('owner add already an owner', async t => { - t.plan(2) - - result = '' - log.info = (title, msg) => { - t.equal(title, 'owner add', 'should use expected title') - t.equal( - msg, - 'Already a package owner: ruyadorno ', - 'should log already package owner info message' - ) - } - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:ruyadorno') { - return { - _id: 'org.couchdb.user:ruyadorno', - email: 'ruyadorno@hotmail.com', - name: 'ruyadorno', - } - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => { - return { - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, - } - } - t.teardown(() => { - result = '' - log.info = noop - npmFetch.json = noop - pacote.packument = noop - }) - - await 
owner.exec(['add', 'ruyadorno', '@npmcli/map-workspaces']) + const { npm, joinedOutput, logs } = await loadMockNpm(t, { + config: { ...auth }, + }) + const username = maintainers[0].name + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) + await npm.exec('owner', ['add', username, packageName]) + t.equal(joinedOutput(), '') + t.match( + logs.info, + [['owner add', 'Already a package owner: test-user-a ']] + ) }) t.test('owner add fails to retrieve user', async t => { - result = '' - readPackageNameResponse = - npmFetch.json = async (uri, opts) => { - // retrieve borked user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { - return { ok: false } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { - return {} - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => ({ - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, + const { npm, logs } = await loadMockNpm(t, { + config: { ...auth }, }) - t.teardown(() => { - result = '' - readPackageNameResponse = null - npmFetch.json = noop - pacote.packument = noop + const username = 'foo' + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), }) - - await t.rejects( - owner.exec(['add', 'foo', '@npmcli/map-workspaces']), - { code: 'EOWNERUSER', message: /Couldn't get user data for foo: {"ok":false}/ }, - 'should throw user data error' - ) + registry.couchuser({ username, responseCode: 404, body: {} }) + await t.rejects(npm.exec('owner', ['add', username, packageName])) + t.match(logs.error, [['owner mutate', `Error getting user data for ${username}`]]) }) t.test('owner add fails to PUT updates', async t => { - result = '' - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { - return { - _id: 'org.couchdb.user:foo', - email: 'foo@github.com', - name: 'foo', - } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { - return { - error: { - status: '418', - message: "I'm a teapot", - }, - } - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => ({ - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, + const { npm } = await loadMockNpm(t, { + config: { ...auth }, }) - t.teardown(() => { - result = '' - npmFetch.json = noop - pacote.packument = noop - }) - - await t.rejects( - owner.exec(['add', 'foo', '@npmcli/map-workspaces']), - { code: 'EOWNERMUTATE', message: /Failed to update package/ }, - 'should throw failed to update package error' - ) -}) - -t.test('owner add fails to retrieve user info', async t => { - t.plan(3) - - result = '' - log.error = (title, msg) => { - t.equal(title, 'owner mutate', 'should use expected title') - t.equal(msg, 'Error getting user data for foo') - } - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { - throw Object.assign( - new Error("I'm a teapot"), - { status: 418 } - ) - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => ({ - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, + const username = 'foo' + const registry = new MockRegistry({ + tap: t, + registry: 
npm.config.get('registry'), }) - t.teardown(() => { - result = '' - log.error = noop - npmFetch.json = noop - pacote.packument = noop + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], }) - + registry.couchuser({ username }) + registry.package({ manifest }) + registry.nock.put(`/${spec.escapedName}/-rev/${manifest._rev}`).reply(404, {}) await t.rejects( - owner.exec(['add', 'foo', '@npmcli/map-workspaces']), - "I'm a teapot", - 'should throw server error response' + npm.exec('owner', ['add', username, packageName]), + { code: 'EOWNERMUTATE' } ) }) t.test('owner add no previous maintainers property from server', async t => { - result = '' - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { - return { - _id: 'org.couchdb.user:foo', - email: 'foo@github.com', - name: 'foo', - } - } else if (uri === '/@npmcli%2fno-owners-pkg/-rev/1-foobaaa1') { - return {} - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => { - return { - _rev: '1-foobaaa1', - maintainers: null, - } - } - t.teardown(() => { - result = '' - npmFetch.json = noop - pacote.packument = noop - }) - - await owner.exec(['add', 'foo', '@npmcli/no-owners-pkg']) - t.equal(result, '+ foo (@npmcli/no-owners-pkg)', 'should output add result') + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { ...auth }, + }) + const username = 'foo' + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers: undefined, version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) + registry.nock.put(`/${spec.escapedName}/-rev/${manifest._rev}`, body => { + t.match(body, { + _id: manifest._id, + _rev: manifest._rev, + maintainers: [{ name: username, email: '' }], + }) + return true + }).reply(200, {}) + await npm.exec('owner', ['add', username, packageName]) + t.equal(joinedOutput(), `+ ${username} (${packageName})`) }) t.test('owner add no user', async t => { - result = '' - t.teardown(() => { - result = '' - }) + const { npm } = await loadMockNpm(t) await t.rejects( - owner.exec(['add']), - owner.usage + npm.exec('owner', ['add']), + { code: 'EUSAGE' } ) }) -t.test('owner add no pkg global', async t => { - t.teardown(() => { - npm.config.set('global', false) +t.test('owner add no pkg global', async t => { + const { npm } = await loadMockNpm(t, { + config: { global: true }, }) - npm.config.set('global', true) await t.rejects( - owner.exec(['add', 'gar']), - owner.usage + npm.exec('owner', ['add', 'foo']), + { code: 'EUSAGE' } ) }) t.test('owner add no cwd package', async t => { - result = '' - t.teardown(() => { - result = '' - }) + const { npm } = await loadMockNpm(t) await t.rejects( - owner.exec(['add', 'foo']), - owner.usage + npm.exec('owner', ['add', 'foo']), + { code: 'EUSAGE' } ) }) t.test('owner rm ', async t => { - t.plan(8) - - result = '' - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:ruyadorno') { - t.ok('should request user info') - t.match(opts, { ...npm.flatOptions }, 'should use expected opts') - return { - _id: 'org.couchdb.user:ruyadorno', - email: 'ruyadorno@hotmail.com', - name: 'ruyadorno', - } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { - t.ok('should put changed 
owner') - t.match(opts, { - ...npm.flatOptions, - method: 'PUT', - body: { - _rev: '1-foobaaa1', - }, - spec: { - name: '@npmcli/map-workspaces', - }, - }, 'should use expected opts') - t.same( - opts.body.maintainers, - npmcliMaintainers.filter(m => m.name !== 'ruyadorno'), - 'should contain expected new owners, removing requested user' - ) - return {} - } else { - t.fail(`unexpected fetch json call to: ${uri}`) - } - } - pacote.packument = async (spec, opts) => { - t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name') - t.match( - opts, - { - ...npm.flatOptions, - fullMetadata: true, - }, - 'should forward expected options to pacote.packument' - ) - return { - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, - } - } - t.teardown(() => { - result = '' - npmFetch.json = noop - pacote.packument = noop - }) - - await owner.exec(['rm', 'ruyadorno', '@npmcli/map-workspaces']) - t.equal(result, '- ruyadorno (@npmcli/map-workspaces)', 'should output rm result') + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { ...auth }, + }) + const username = maintainers[0].name + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) + registry.nock.put(`/${spec.escapedName}/-rev/${manifest._rev}`, body => { + t.match(body, { + _id: manifest._id, + _rev: manifest._rev, + maintainers: maintainers.slice(1), + }) + return true + }).reply(200, {}) + await npm.exec('owner', ['rm', username, packageName]) + t.equal(joinedOutput(), `- ${username} (${packageName})`) }) t.test('owner rm not a current owner', async t => { - t.plan(2) - - result = '' - log.info = (title, msg) => { - t.equal(title, 'owner rm', 'should log expected title') - t.equal(msg, 'Not a package owner: foo', 'should log.info not a package owner msg') - } - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:foo') { - return { - _id: 'org.couchdb.user:foo', - email: 'foo@github.com', - name: 'foo', - } - } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { - return {} - } else { - t.fail(`unexpected fetch json call to: ${uri}`) - } - } - pacote.packument = async (spec, opts) => { - return { - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, - } - } - t.teardown(() => { - result = '' - log.info = noop - npmFetch.json = noop - pacote.packument = noop - }) - - await owner.exec(['rm', 'foo', '@npmcli/map-workspaces']) + const { npm, logs } = await loadMockNpm(t, { + config: { ...auth }, + }) + const username = 'foo' + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) + await npm.exec('owner', ['rm', username, packageName]) + t.match(logs.info, [['owner rm', `Not a package owner: ${username}`]]) }) t.test('owner rm cwd package', async t => { - result = '' - readPackageNameResponse = '@npmcli/map-workspaces' - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:ruyadorno') { - return { - _id: 'org.couchdb.user:ruyadorno', - email: 'ruyadorno@hotmail.com', - name: 'ruyadorno', - } - } else if (uri === 
'/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') { - return {} - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => ({ - _rev: '1-foobaaa1', - maintainers: npmcliMaintainers, - }) - t.teardown(() => { - result = '' - readPackageNameResponse = null - npmFetch.json = noop - pacote.packument = noop - }) - - await owner.exec(['rm', 'ruyadorno']) - t.equal(result, '- ruyadorno (@npmcli/map-workspaces)', 'should output rm result') + const { npm, joinedOutput } = await loadMockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: packageName }), + }, + config: { ...auth }, + }) + const username = maintainers[0].name + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) + registry.nock.put(`/${spec.escapedName}/-rev/${manifest._rev}`, body => { + t.match(body, { + _id: manifest._id, + _rev: manifest._rev, + maintainers: maintainers.slice(1), + }) + return true + }).reply(200, {}) + await npm.exec('owner', ['rm', username]) + t.equal(joinedOutput(), `- ${username} (${packageName})`) }) t.test('owner rm only user', async t => { - result = '' - readPackageNameResponse = 'ipt' - npmFetch.json = async (uri, opts) => { - // retrieve user info from couchdb request - if (uri === '/-/user/org.couchdb.user:ruyadorno') { - return { - _id: 'org.couchdb.user:ruyadorno', - email: 'ruyadorno@hotmail.com', - name: 'ruyadorno', - } - } else { - t.fail(`unexpected fetch json call to uri: ${uri}`) - } - } - pacote.packument = async (spec, opts) => ({ - _rev: '1-foobaaa1', - maintainers: [{ - name: 'ruyadorno', - email: 'ruyadorno@hotmail.com', - }], - }) - t.teardown(() => { - result = '' - readPackageNameResponse = null - npmFetch.json = noop - pacote.packument = noop - }) - + const { npm } = await loadMockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify({ name: packageName }), + }, + config: { ...auth }, + }) + const username = maintainers[0].name + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers: maintainers.slice(0, 1), version: '1.0.0' }], + }) + registry.couchuser({ username }) + registry.package({ manifest }) await t.rejects( - owner.exec(['rm', 'ruyadorno']), - { code: 'EOWNERRM', message: 'Cannot remove all owners of a package. Add someone else first.' }, - 'should throw unable to remove unique owner message' + npm.exec('owner', ['rm', username]), + { + code: 'EOWNERRM', + message: 'Cannot remove all owners of a package. 
Add someone else first.', + } ) }) t.test('owner rm no user', async t => { - result = '' - t.teardown(() => { - result = '' - }) - + const { npm } = await loadMockNpm(t) await t.rejects( - owner.exec(['rm']), - owner.usage + npm.exec('owner', ['rm']), + { code: 'EUSAGE' } ) }) t.test('owner rm no pkg global', async t => { - t.teardown(() => { - npm.config.set('global', false) + const { npm } = await loadMockNpm(t, { + config: { global: true }, }) - npm.config.set('global', true) - await t.rejects( - owner.exec(['rm', 'foo']), - owner.usage + npm.exec('owner', ['rm', 'foo']), + { code: 'EUSAGE' } ) }) t.test('owner rm no cwd package', async t => { - result = '' - t.teardown(() => { - result = '' - }) - + const { npm } = await loadMockNpm(t) await t.rejects( - owner.exec(['rm', 'foo']), - owner.usage + npm.exec('owner', ['rm', 'foo']), + { code: 'EUSAGE' } ) }) t.test('completion', async t => { - const testComp = async (argv, expect) => { - const res = await owner.completion({ conf: { argv: { remain: argv } } }) - t.strictSame(res, expect, argv.join(' ')) - } - - await Promise.all([ - testComp(['npm', 'foo'], []), - testComp(['npm', 'owner'], ['add', 'rm', 'ls']), - testComp(['npm', 'owner', 'add'], []), - testComp(['npm', 'owner', 'ls'], []), - testComp(['npm', 'owner', 'rm', 'foo'], []), - ]) - - // npm owner rm completion is async - t.test('completion npm owner rm', async t => { - t.plan(2) - readPackageNameResponse = '@npmcli/map-workspaces' - pacote.packument = async spec => { - t.equal(spec.name, readPackageNameResponse, 'should use package spec') - return { - maintainers: npmcliMaintainers, - } + t.test('basic commands', async t => { + const { npm } = await loadMockNpm(t) + const owner = await npm.cmd('owner') + const testComp = async (argv, expect) => { + const res = await owner.completion({ conf: { argv: { remain: argv } } }) + t.strictSame(res, expect, argv.join(' ')) } - t.teardown(() => { - readPackageNameResponse = null - pacote.packument = noop - }) + await Promise.all([ + testComp(['npm', 'foo'], []), + testComp(['npm', 'owner'], ['add', 'rm', 'ls']), + testComp(['npm', 'owner', 'add'], []), + testComp(['npm', 'owner', 'ls'], []), + testComp(['npm', 'owner', 'rm', 'foo'], []), + ]) + }) + + t.test('completion npm owner rm', async t => { + const { npm } = await loadMockNpm(t, { + prefixDir: { 'package.json': JSON.stringify({ name: packageName }) }, + }) + const owner = await npm.cmd('owner') + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers, version: '1.0.0' }], + }) + registry.package({ manifest }) const res = await owner.completion({ conf: { argv: { remain: ['npm', 'owner', 'rm'] } } }) - t.strictSame(res, - ['nlf', 'ruyadorno', 'darcyclarke', 'isaacs'], - 'should return list of current owners' - ) + t.strictSame(res, maintainers.map(m => m.name), 'should return list of current owners') }) t.test('completion npm owner rm no cwd package', async t => { + const { npm } = await loadMockNpm(t) + const owner = await npm.cmd('owner') const res = await owner.completion({ conf: { argv: { remain: ['npm', 'owner', 'rm'] } } }) t.strictSame(res, [], 'should have no owners to autocomplete if not cwd package') - t.end() }) t.test('completion npm owner rm global', async t => { - t.teardown(() => { - npm.config.set('global', false) + const { npm } = await loadMockNpm(t, { + config: { global: true }, }) - npm.config.set('global', true) + const owner = await 
npm.cmd('owner') const res = await owner.completion({ conf: { argv: { remain: ['npm', 'owner', 'rm'] } } }) t.strictSame(res, [], 'should have no owners to autocomplete if global') - t.end() }) t.test('completion npm owner rm no owners found', async t => { - t.plan(2) - readPackageNameResponse = '@npmcli/map-workspaces' - pacote.packument = async spec => { - t.equal(spec.name, readPackageNameResponse, 'should use package spec') - return { - maintainers: [], - } - } - t.teardown(() => { - readPackageNameResponse = null - pacote.packument = noop + const { npm } = await loadMockNpm(t, { + prefixDir: { 'package.json': JSON.stringify({ name: packageName }) }, }) + const owner = await npm.cmd('owner') + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + }) + const manifest = registry.manifest({ + name: packageName, + packuments: [{ maintainers: [], version: '1.0.0' }], + }) + registry.package({ manifest }) const res = await owner.completion({ conf: { argv: { remain: ['npm', 'owner', 'rm'] } } }) t.strictSame(res, [], 'should return no owners if not found') }) - - t.end() }) diff --git a/deps/npm/test/lib/commands/publish.js b/deps/npm/test/lib/commands/publish.js index 64eb7c60cb0621..3cbe962382e21b 100644 --- a/deps/npm/test/lib/commands/publish.js +++ b/deps/npm/test/lib/commands/publish.js @@ -1,271 +1,190 @@ const t = require('tap') -const { fake: mockNpm } = require('../../fixtures/mock-npm') -const fs = require('fs') +const { load: loadMockNpm } = require('../../fixtures/mock-npm') +const MockRegistry = require('../../fixtures/mock-registry.js') +const pacote = require('pacote') +const path = require('path') +const fs = require('@npmcli/fs') +const npa = require('npm-package-arg') + +const pkg = 'test-package' +const token = 'test-auth-token' +const auth = { '//registry.npmjs.org/:_authToken': token } +const alternateRegistry = 'https://other.registry.npmjs.org' +const basic = Buffer.from('test-user:test-password').toString('base64') + +const pkgJson = { + name: pkg, + description: 'npm test package', + version: '1.0.0', +} t.cleanSnapshot = data => { - return data.replace(/^ *"gitHead": .*$\n/gm, '') + return data.replace(/shasum:.*/g, 'shasum:{sha}') + .replace(/integrity:.*/g, 'integrity:{sha}') + .replace(/"shasum": ".*",/g, '"shasum": "{sha}",') + .replace(/"integrity": ".*",/g, '"integrity": "{sha}",') } -const { definitions } = require('../../../lib/utils/config') -const defaults = Object.entries(definitions).reduce((defaults, [key, def]) => { - defaults[key] = def.default - return defaults -}, {}) - -t.test( - /* eslint-disable-next-line max-len */ - 'should publish with libnpmpublish, passing through flatOptions and respecting publishConfig.registry', - async t => { - t.plan(6) - - const registry = 'https://some.registry' - const publishConfig = { registry } - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', - version: '1.0.0', - publishConfig, +t.test('respects publishConfig.registry, runs appropriate scripts', async t => { + const { npm, joinedOutput, prefix } = await loadMockNpm(t, { + config: { + loglevel: 'silent', // prevent scripts from leaking to stdout during the test + [`${alternateRegistry.slice(6)}/:_authToken`]: 'test-other-token', + }, + prefixDir: { + 'package.json': JSON.stringify({ + ...pkgJson, + scripts: { + prepublishOnly: 'touch scripts-prepublishonly', + prepublish: 'touch scripts-prepublish', // should NOT run this one + publish: 'touch scripts-publish', + postpublish: 'touch 
scripts-postpublish', }, - null, - 2 - ), - }) - - const Publish = t.mock('../../../lib/commands/publish.js', { - // verify that we do NOT remove publishConfig if it was there originally - // and then removed during the script/pack process - libnpmpack: async () => { - fs.writeFileSync( - `${testDir}/package.json`, - JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - }) - ) - return Buffer.from('') - }, - libnpmpublish: { - publish: (manifest, tarData, opts) => { - t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') - t.type(tarData, Buffer, 'tarData is a buffer') - t.ok(opts, 'gets opts object') - t.same(opts.customValue, true, 'flatOptions values are passed through') - t.same(opts.registry, registry, 'publishConfig.registry is passed through') + publishConfig: { registry: alternateRegistry }, + }, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + }) + const registry = new MockRegistry({ + tap: t, + registry: alternateRegistry, + authorization: 'test-other-token', + }) + registry.nock.put(`/${pkg}`, body => { + return t.match(body, { + _id: pkg, + name: pkg, + 'dist-tags': { latest: '1.0.0' }, + access: null, + versions: { + '1.0.0': { + name: pkg, + version: '1.0.0', + _id: `${pkg}@1.0.0`, + dist: { + shasum: /\.*/, + tarball: `http:${alternateRegistry.slice(6)}/test-package/-/test-package-1.0.0.tgz`, + }, + publishConfig: { + registry: alternateRegistry, + }, }, }, - }) - const npm = mockNpm({ - flatOptions: { - customValue: true, - workspacesEnabled: true, + _attachments: { + [`${pkg}-1.0.0.tgz`]: {}, }, }) - npm.config.getCredentialsByURI = uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) - - await publish.exec([testDir]) - } -) + }).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'new package version') + t.resolveMatch(fs.exists(path.join(prefix, 'scripts-prepublishonly')), true, 'ran prepublishOnly') + t.resolveMatch( + fs.exists(path.join(prefix, 'scripts-prepublish')), + false, + 'did not run prepublish' + ) + t.resolveMatch(fs.exists(path.join(prefix, 'scripts-publish')), true, 'ran publish') + t.resolveMatch(fs.exists(path.join(prefix, 'scripts-postpublish')), true, 'ran postpublish') +}) t.test('re-loads publishConfig.registry if added during script process', async t => { - t.plan(5) - const registry = 'https://some.registry' - const publishConfig = { registry } - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', - version: '1.0.0', - }, - null, - 2 - ), - }) - - const Publish = t.mock('../../../lib/commands/publish.js', { - libnpmpack: async () => { - fs.writeFileSync( - `${testDir}/package.json`, - JSON.stringify({ - name: 'my-cool-pkg', - version: '1.0.0', - publishConfig, - }) - ) - return Buffer.from('') - }, - libnpmpublish: { - publish: (manifest, tarData, opts) => { - t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') - t.type(tarData, Buffer, 'tarData is a buffer') - t.ok(opts, 'gets opts object') - t.same(opts.registry, registry, 'publishConfig.registry is passed through') - }, + const { joinedOutput, npm } = await loadMockNpm(t, { + config: { + [`${alternateRegistry.slice(6)}/:_authToken`]: 'test-other-token', }, - }) - const npm = mockNpm() - npm.config.getCredentialsByURI = uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } 
- const publish = new Publish(npm) - - await publish.exec([testDir]) -}) - -t.test('if loglevel=info and json, should not output package contents', async t => { - t.plan(3) - - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', - version: '1.0.0', - }, - null, - 2 - ), - }) - - const Publish = t.mock('../../../lib/commands/publish.js', { - '../../../lib/utils/tar.js': { - getContents: () => ({ - id: 'someid', + prefixDir: { + 'package.json': JSON.stringify({ + ...pkgJson, + scripts: { + prepare: 'cp new.json package.json', + }, + }, null, 2), + 'new.json': JSON.stringify({ + ...pkgJson, + publishConfig: { registry: alternateRegistry }, }), - logTar: () => { - t.fail('logTar is not called in json mode') - }, - }, - libnpmpublish: { - publish: () => { - t.pass('publish called') - }, }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - const npm = mockNpm({ - config: { json: true, loglevel: 'info' }, - output: () => { - t.pass('output is called') - }, - }, t) - npm.config.getCredentialsByURI = uri => { - t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) - - await publish.exec([testDir]) -}) - -t.test( - /* eslint-disable-next-line max-len */ - 'if loglevel=silent and dry-run, should not output package contents or publish, should log tarball contents', - async t => { - t.plan(2) - - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', + const registry = new MockRegistry({ + tap: t, + registry: alternateRegistry, + authorization: 'test-other-token', + }) + registry.nock.put(`/${pkg}`, body => { + return t.match(body, { + _id: pkg, + name: pkg, + 'dist-tags': { latest: '1.0.0' }, + access: null, + versions: { + '1.0.0': { + name: pkg, version: '1.0.0', - }, - null, - 2 - ), - }) - - const Publish = t.mock('../../../lib/commands/publish.js', { - '../../../lib/utils/tar.js': { - getContents: () => ({ - id: 'someid', - }), - logTar: () => { - t.pass('logTar is called') - }, - }, - libnpmpublish: { - publish: () => { - throw new Error('should not call libnpmpublish in dry run') - }, - }, - }) - const npm = mockNpm({ - config: { 'dry-run': true, loglevel: 'silent' }, - output: () => { - throw new Error('should not output in dry run mode') - }, - }, t) - npm.config.getCredentialsByURI = uri => { - t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - - const publish = new Publish(npm) - - await publish.exec([testDir]) - } -) - -t.test( - /* eslint-disable-next-line max-len */ - 'if loglevel=info and dry-run, should not publish, should log package contents and log tarball contents', - async t => { - t.plan(3) - - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', - version: '1.0.0', - }, - null, - 2 - ), - }) - - const npm = mockNpm({ - config: { 'dry-run': true, loglevel: 'info' }, - output: () => { - t.pass('output fn is called') - }, - }, t) - const registry = npm.config.get('registry') - npm.config.getCredentialsByURI = uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return { /* no token will call log.warn */ } - } - - const Publish = t.mock('../../../lib/commands/publish.js', { - '../../../lib/utils/tar.js': { - getContents: () => ({ - id: 'someid', - }), - logTar: () => { - t.pass('logTar is called') - }, - 'proc-log': { - warn (_, msg) { - 
t.match(msg, - `This command requires you to be logged in to ${registry} (dry-run)`) + _id: `${pkg}@1.0.0`, + dist: { + shasum: /\.*/, + tarball: `http:${alternateRegistry.slice(6)}/test-package/-/test-package-1.0.0.tgz`, + }, + publishConfig: { + registry: alternateRegistry, }, }, }, - libnpmpublish: { - publish: () => { - throw new Error('should not call libnpmpublish in dry run') - }, + _attachments: { + [`${pkg}-1.0.0.tgz`]: {}, }, }) + }).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'new package version') +}) - const publish = new Publish(npm) +t.test('json', async t => { + const { joinedOutput, npm, logs } = await loadMockNpm(t, { + config: { + json: true, + ...auth, + }, + prefixDir: { + 'package.json': JSON.stringify(pkgJson, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock.put(`/${pkg}`).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(logs.notice) + t.matchSnapshot(joinedOutput(), 'new package json') +}) - await publish.exec([testDir]) - } -) +t.test('dry-run', async t => { + const { joinedOutput, npm, logs } = await loadMockNpm(t, { + config: { + 'dry-run': true, + ...auth, + }, + prefixDir: { + 'package.json': JSON.stringify(pkgJson, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + }) + await npm.exec('publish', []) + t.equal(joinedOutput(), `+ ${pkg}@1.0.0`) + t.matchSnapshot(logs.notice) +}) t.test('shows usage with wrong set of arguments', async t => { t.plan(1) @@ -276,279 +195,174 @@ t.test('shows usage with wrong set of arguments', async t => { }) t.test('throws when invalid tag', async t => { - t.plan(1) - - const Publish = t.mock('../../../lib/commands/publish.js') - const npm = mockNpm({ - config: { tag: '0.0.13' }, + const { npm } = await loadMockNpm(t, { + config: { + tag: '0.0.13', + }, + prefixDir: { + 'package.json': JSON.stringify(pkgJson, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - const publish = new Publish(npm) - await t.rejects( - publish.exec([]), - /Tag name must not be a valid SemVer range: /, - 'throws when tag name is a valid SemVer range' + npm.exec('publish', []), + { message: 'Tag name must not be a valid SemVer range: 0.0.13' } ) }) -t.test('can publish a tarball', async t => { - t.plan(3) - - const testDir = t.testdir({ - tarball: {}, - package: { +t.test('tarball', async t => { + const { npm, joinedOutput, logs, home } = await loadMockNpm(t, { + config: { + 'fetch-retries': 0, + ...auth, + }, + homeDir: { 'package.json': JSON.stringify({ - name: 'my-cool-tarball', - version: '1.2.3', - }), - 'README.md': 'This is my readme', + name: 'test-tar-package', + description: 'this was from a tarball', + version: '1.0.0', + }, null, 2), + 'index.js': 'console.log("hello world"}', }, }) - const tar = require('tar') - tar.c( - { - cwd: testDir, - file: `${testDir}/tarball/package.tgz`, - sync: true, - }, - ['package'] - ) + const tarball = await pacote.tarball(home) + const tarFilename = path.join(home, 'tarball.tgz') + await fs.writeFile(tarFilename, tarball) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock.put('/test-tar-package', body => { + return t.match(body, { + name: 'test-tar-package', + }) + }).reply(200, {}) + await npm.exec('publish', [tarFilename]) + 
t.matchSnapshot(logs.notice) + t.matchSnapshot(joinedOutput(), 'new package json') +}) - const tarFile = fs.readFileSync(`${testDir}/tarball/package.tgz`) - const Publish = t.mock('../../../lib/commands/publish.js', { - libnpmpublish: { - publish: (manifest, tarData, opts) => { - t.match( - manifest, - { - name: 'my-cool-tarball', - version: '1.2.3', - readme: 'This is my readme', - description: 'This is my readme', - readmeFilename: 'README.md', - }, - 'sent manifest to lib pub' - ) - t.strictSame(tarData, tarFile, 'sent the tarball data to lib pub') - }, +t.test('no auth default registry', async t => { + const { npm } = await loadMockNpm(t, { + prefixDir: { + 'package.json': JSON.stringify(pkgJson, null, 2), }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - const npm = mockNpm() - npm.config.getCredentialsByURI = uri => { - t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) - - await publish.exec([`${testDir}/tarball/package.tgz`]) -}) - -t.test('should check auth for default registry', async t => { - t.plan(2) - const npm = mockNpm() - const registry = npm.config.get('registry') - const errorMessage = `This command requires you to be logged in to ${registry}` - const Publish = t.mock('../../../lib/commands/publish.js') - npm.config.getCredentialsByURI = uri => { - t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') - return {} - } - const publish = new Publish(npm) - await t.rejects( - publish.exec([]), - { message: errorMessage, code: 'ENEEDAUTH' }, - 'throws when not logged in' + npm.exec('publish', []), + { + message: 'This command requires you to be logged in to https://registry.npmjs.org/', + code: 'ENEEDAUTH', + } ) }) -t.test('should check auth for configured registry', async t => { - t.plan(2) - const registry = 'https://some.registry' - const errorMessage = 'This command requires you to be logged in to https://some.registry' - const Publish = t.mock('../../../lib/commands/publish.js') - const npm = mockNpm({ - flatOptions: { registry }, +t.test('no auth dry-run', async t => { + const { npm, joinedOutput, logs } = await loadMockNpm(t, { + config: { + 'dry-run': true, + }, + prefixDir: { + 'package.json': JSON.stringify(pkgJson, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - npm.config.getCredentialsByURI = uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return {} - } - const publish = new Publish(npm) - - await t.rejects( - publish.exec([]), - { message: errorMessage, code: 'ENEEDAUTH' }, - 'throws when not logged in' - ) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput()) + t.matchSnapshot(logs.warn, 'warns about auth being needed') }) -t.test('should check auth for scope specific registry', async t => { - t.plan(2) - const registry = 'https://some.registry' - const errorMessage = 'This command requires you to be logged in to https://some.registry' - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: '@npm/my-cool-pkg', - version: '1.0.0', - }, - null, - 2 - ), - }) - - const Publish = t.mock('../../../lib/commands/publish.js') - const npm = mockNpm({ - flatOptions: { '@npm:registry': registry }, +t.test('no auth for configured registry', async t => { + const { npm } = await loadMockNpm(t, { + config: { + registry: alternateRegistry, + ...auth, + }, + prefixDir: { + 'package.json': JSON.stringify(pkgJson, 
null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - npm.config.getCredentialsByURI = uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return {} - } - const publish = new Publish(npm) - await t.rejects( - publish.exec([testDir]), - { message: errorMessage, code: 'ENEEDAUTH' }, - 'throws when not logged in' + npm.exec('publish', []), + { + message: `This command requires you to be logged in to ${alternateRegistry}`, + code: 'ENEEDAUTH', + } ) }) -t.test('should use auth for scope specific registry', async t => { - t.plan(3) - const registry = 'https://some.registry' - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: '@npm/my-cool-pkg', - version: '1.0.0', - }, - null, - 2 - ), - }) - - const Publish = t.mock('../../../lib/commands/publish.js', { - libnpmpublish: { - publish: (manifest, tarData, opts) => { - t.ok(opts, 'gets opts object') - t.same(opts['@npm:registry'], registry, 'scope specific registry is passed through') - }, +t.test('no auth for scope configured registry', async t => { + const { npm } = await loadMockNpm(t, { + config: { + '@npm:registry': alternateRegistry, + ...auth, }, - }) - const npm = mockNpm({ - flatOptions: { '@npm:registry': registry }, - }) - npm.config.getCredentialsByURI = uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) - - await publish.exec([testDir]) -}) - -t.test('read registry only from publishConfig', async t => { - t.plan(3) - - const registry = 'https://some.registry' - const publishConfig = { registry } - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', + prefixDir: { + 'package.json': JSON.stringify({ + name: '@npm/test-package', version: '1.0.0', - publishConfig, - }, - null, - 2 - ), - }) - - const Publish = t.mock('../../../lib/commands/publish.js', { - libnpmpublish: { - publish: (manifest, tarData, opts) => { - t.match(manifest, { name: 'my-cool-pkg', version: '1.0.0' }, 'gets manifest') - t.same(opts.registry, registry, 'publishConfig is passed through') - }, + }, null, 2), }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - const npm = mockNpm() - npm.config.getCredentialsByURI = uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) - - await publish.exec([testDir]) + await t.rejects( + npm.exec('publish', []), + { + message: `This command requires you to be logged in to ${alternateRegistry}`, + code: 'ENEEDAUTH', + } + ) }) -t.test('able to publish after if encountered multiple configs', async t => { - t.plan(2) - - const registry = 'https://some.registry' - const tag = 'better-tag' - const publishConfig = { registry } - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', +t.test('has auth for scope configured registry', async t => { + const spec = npa('@npm/test-package') + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { + '@npm:registry': alternateRegistry, + [`${alternateRegistry.slice(6)}/:_authToken`]: 'test-scope-token', + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: '@npm/test-package', version: '1.0.0', - publishConfig, - }, - null, - 2 - ), - }) - - const configList = [defaults] - configList.unshift( - Object.assign(Object.create(configList[0]), { - registry: `https://other.registry`, - tag: 'some-tag', - }) - 
) - configList.unshift(Object.assign(Object.create(configList[0]), { tag })) - - const Publish = t.mock('../../../lib/commands/publish.js', { - libnpmpublish: { - publish: (manifest, tarData, opts) => { - t.same(opts.defaultTag, tag, 'gets option for expected tag') - }, + }, null, 2), }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - const publish = new Publish({ - // what would be flattened by the configList created above - flatOptions: { - defaultTag: 'better-tag', - registry: 'https://other.registry', - }, - output () {}, - config: { - get: key => configList[0][key], - list: configList, - getCredentialsByURI: uri => { - t.same(uri, registry, 'gets credentials for expected registry') - return { token: 'some.registry.token' } - }, - }, + const registry = new MockRegistry({ + tap: t, + registry: alternateRegistry, + authorization: 'test-scope-token', }) - - await publish.exec([testDir]) + registry.nock.put(`/${spec.escapedName}`, body => { + return t.match(body, { name: '@npm/test-package' }) + }).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'new package version') }) t.test('workspaces', t => { - const testDir = t.testdir({ + const dir = { 'package.json': JSON.stringify( { - name: 'my-cool-pkg', - version: '1.0.0', - workspaces: ['workspace-a', 'workspace-b', 'workspace-c'], - }, - null, - 2 - ), + ...pkgJson, + workspaces: ['workspace-a', 'workspace-b', 'workspace-c', 'workspace-p'], + }, null, 2), 'workspace-a': { 'package.json': JSON.stringify({ name: 'workspace-a', @@ -569,304 +383,321 @@ t.test('workspaces', t => { version: '1.2.3-n', }), }, - }) - - const publishes = [] - const outputs = [] - t.beforeEach(() => { - npm.config.set('json', false) - outputs.length = 0 - publishes.length = 0 - }) - const Publish = t.mock('../../../lib/commands/publish.js', { - '../../../lib/utils/tar.js': { - getContents: manifest => ({ - id: manifest._id, + 'workspace-p': { + 'package.json': JSON.stringify({ + name: 'workspace-p', + version: '1.2.3-p', + private: true, }), - logTar: () => {}, - }, - libnpmpublish: { - publish: (manifest, tarballData, opts) => { - publishes.push(manifest) - }, - }, - }) - const npm = mockNpm({ - output: o => { - outputs.push(o) }, - }) - npm.localPrefix = testDir - npm.config.getCredentialsByURI = uri => { - return { token: 'some.registry.token' } } - const publish = new Publish(npm) - - t.test('all workspaces', async t => { - await publish.execWorkspaces([], []) - t.matchSnapshot(publishes, 'should publish all workspaces') - t.matchSnapshot(outputs, 'should output all publishes') - }) - - t.test('one workspace', async t => { - await publish.execWorkspaces([], ['workspace-a']) - t.matchSnapshot(publishes, 'should publish given workspace') - t.matchSnapshot(outputs, 'should output one publish') - }) - t.test('invalid workspace', async t => { - await t.rejects(publish.execWorkspaces([], ['workspace-x']), /No workspaces found/) - await t.rejects(publish.execWorkspaces([], ['workspace-x']), /workspace-x/) - }) - - t.test('json', async t => { - npm.config.set('json', true) - await publish.execWorkspaces([], []) - t.matchSnapshot(publishes, 'should publish all workspaces') - t.matchSnapshot(outputs, 'should output all publishes as json') - }) - t.end() -}) - -t.test('private workspaces', async t => { - const testDir = t.testdir({ - 'package.json': JSON.stringify({ - name: 'workspaces-project', - version: '1.0.0', - workspaces: ['packages/*'], - }), - packages: { - a: { - 'package.json': JSON.stringify({ - name: 
'@npmcli/a', - version: '1.0.0', - private: true, - }), + t.test('all workspaces - no color', async t => { + const { npm, joinedOutput, logs } = await loadMockNpm(t, { + config: { + color: false, + ...auth, + workspaces: true, }, - b: { - 'package.json': JSON.stringify({ - name: '@npmcli/b', - version: '1.0.0', - }), - }, - }, - }) - - const publishes = [] - const outputs = [] - t.beforeEach(() => { - npm.config.set('json', false) - outputs.length = 0 - publishes.length = 0 - }) - const mocks = { - '../../../lib/utils/tar.js': { - getContents: manifest => ({ - id: manifest._id, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, }), - logTar: () => {}, - }, - libnpmpublish: { - publish: (manifest, tarballData, opts) => { - if (manifest.private) { - throw Object.assign(new Error('private pkg'), { code: 'EPRIVATE' }) - } - publishes.push(manifest) - }, - }, - } - const npm = mockNpm({ - config: { loglevel: 'info' }, - output: o => { - outputs.push(o) - }, - }, t) - npm.localPrefix = testDir - npm.config.getCredentialsByURI = uri => { - return { token: 'some.registry.token' } - } - - t.test('with color', async t => { - t.plan(4) - - const Publish = t.mock('../../../lib/commands/publish.js', { - ...mocks, - 'proc-log': { - notice () {}, - verbose () {}, - warn (title, msg) { - t.equal(title, 'publish', 'should use publish warn title') - t.match( - msg, - /* eslint-disable-next-line max-len */ - 'Skipping workspace \u001b[32m@npmcli/a\u001b[39m, marked as \u001b[1mprivate\u001b[22m', - 'should display skip private workspace warn msg' - ) - }, - }, + prefixDir: dir, }) - const publish = new Publish(npm) - - npm.color = true - await publish.execWorkspaces([], []) - t.matchSnapshot(publishes, 'should publish all non-private workspaces') - t.matchSnapshot(outputs, 'should output all publishes') - npm.color = false + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock + .put('/workspace-a', body => { + return t.match(body, { name: 'workspace-a' }) + }).reply(200, {}) + .put('/workspace-b', body => { + return t.match(body, { name: 'workspace-b' }) + }).reply(200, {}) + .put('/workspace-n', body => { + return t.match(body, { name: 'workspace-n' }) + }).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'all public workspaces') + t.matchSnapshot(logs.warn, 'warns about skipped private workspace') + }) + + t.test('all workspaces - color', async t => { + const { npm, joinedOutput, logs } = await loadMockNpm(t, { + config: { + ...auth, + color: 'always', + workspaces: true, + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + prefixDir: dir, + }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock + .put('/workspace-a', body => { + return t.match(body, { name: 'workspace-a' }) + }).reply(200, {}) + .put('/workspace-b', body => { + return t.match(body, { name: 'workspace-b' }) + }).reply(200, {}) + .put('/workspace-n', body => { + return t.match(body, { name: 'workspace-n' }) + }).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'all public workspaces') + t.matchSnapshot(logs.warn, 'warns about skipped private workspace in color') + }) + + t.test('one workspace - success', async t => { + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { + ...auth, + workspace: ['workspace-a'], + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + 
}), + prefixDir: dir, + }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock + .put('/workspace-a', body => { + return t.match(body, { name: 'workspace-a' }) + }).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'single workspace') + }) + + t.test('one workspace - failure', async t => { + const { npm } = await loadMockNpm(t, { + config: { + ...auth, + workspace: ['workspace-a'], + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + prefixDir: dir, + }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock + .put('/workspace-a', body => { + return t.match(body, { name: 'workspace-a' }) + }).reply(404, {}) + await t.rejects(npm.exec('publish', []), { code: 'E404' }) }) - t.test('colorless', async t => { - t.plan(4) - - const Publish = t.mock('../../../lib/commands/publish.js', { - ...mocks, - 'proc-log': { - notice () {}, - verbose () {}, - warn (title, msg) { - t.equal(title, 'publish', 'should use publish warn title') - t.equal( - msg, - 'Skipping workspace @npmcli/a, marked as private', - 'should display skip private workspace warn msg' - ) - }, + t.test('invalid workspace', async t => { + const { npm } = await loadMockNpm(t, { + config: { + ...auth, + workspace: ['workspace-x'], }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + prefixDir: dir, }) - const publish = new Publish(npm) - - await publish.execWorkspaces([], []) - t.matchSnapshot(publishes, 'should publish all non-private workspaces') - t.matchSnapshot(outputs, 'should output all publishes') + await t.rejects( + npm.exec('publish', []), + { message: 'No workspaces found:\n --workspace=workspace-x' } + ) }) - t.test('unexpected error', async t => { - t.plan(2) - - const Publish = t.mock('../../../lib/commands/publish.js', { - ...mocks, - libnpmpublish: { - publish: (manifest, tarballData, opts) => { - if (manifest.private) { - throw new Error('ERR') - } - publishes.push(manifest) - }, - }, - 'proc-log': { - notice (__, msg) { - t.match(msg, 'Publishing to https://registry.npmjs.org/') - }, - verbose () {}, - }, + t.test('json', async t => { + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { + ...auth, + workspaces: true, + json: true, + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + prefixDir: dir, }) - const publish = new Publish(npm) - - await t.rejects(publish.execWorkspaces([], []), /ERR/, 'should throw unexpected error') + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock + .put('/workspace-a', body => { + return t.match(body, { name: 'workspace-a' }) + }).reply(200, {}) + .put('/workspace-b', body => { + return t.match(body, { name: 'workspace-b' }) + }).reply(200, {}) + .put('/workspace-n', body => { + return t.match(body, { name: 'workspace-n' }) + }).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'all workspaces in json') }) - t.end() }) -t.test('runs correct lifecycle scripts', async t => { - t.plan(5) - - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', - version: '1.0.0', +t.test('ignore-scripts', async t => { + const { npm, joinedOutput, prefix } = await loadMockNpm(t, { + config: { + ...auth, + 'ignore-scripts': true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + ...pkgJson, 
scripts: { - prepublishOnly: 'echo test prepublishOnly', - prepublish: 'echo test prepublish', // should NOT run this one - publish: 'echo test publish', - postpublish: 'echo test postpublish', + prepublishOnly: 'touch scripts-prepublishonly', + prepublish: 'touch scripts-prepublish', // should NOT run this one + publish: 'touch scripts-publish', + postpublish: 'touch scripts-postpublish', }, - }, - null, - 2 - ), + }, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + authorization: token, + }) + registry.nock.put(`/${pkg}`).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'new package version') + t.resolveMatch( + fs.exists(path.join(prefix, 'scripts-prepublishonly')), + false, + 'did not run prepublishOnly' + ) + t.resolveMatch( + fs.exists(path.join(prefix, 'scripts-prepublish')), + false, + 'did not run prepublish' + ) + t.resolveMatch( + fs.exists(path.join(prefix, 'scripts-publish')), + false, + 'did not run publish' + ) + t.resolveMatch( + fs.exists(path.join(prefix, 'scripts-postpublish')), + false, + 'did not run postpublish' + ) +}) - const scripts = [] - const Publish = t.mock('../../../lib/commands/publish.js', { - '@npmcli/run-script': args => { - scripts.push(args) - }, - '../../../lib/utils/tar.js': { - getContents: () => ({ - id: 'someid', - }), - logTar: () => { - t.pass('logTar is called') - }, +t.test('_auth config default registry', async t => { + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { + _auth: basic, }, - libnpmpublish: { - publish: () => { - t.pass('publish called') - }, + prefixDir: { + 'package.json': JSON.stringify(pkgJson), }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - const npm = mockNpm({ - config: { loglevel: 'info' }, - output: () => { - t.pass('output is called') - }, - }, t) - npm.config.getCredentialsByURI = uri => { - t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) - await publish.exec([testDir]) - t.same( - scripts.map(s => s.event), - ['prepublishOnly', 'publish', 'postpublish'], - 'runs only expected scripts, in order' - ) + const registry = new MockRegistry({ + tap: t, + registry: npm.config.get('registry'), + basic, + }) + registry.nock.put(`/${pkg}`).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'new package version') }) -t.test('does not run scripts on --ignore-scripts', async t => { - t.plan(4) - - const testDir = t.testdir({ - 'package.json': JSON.stringify( - { - name: 'my-cool-pkg', +t.test('bare _auth and registry config', async t => { + const spec = npa('@npm/test-package') + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { + registry: alternateRegistry, + _auth: basic, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: '@npm/test-package', version: '1.0.0', - }, - null, - 2 - ), + }, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) + const registry = new MockRegistry({ + tap: t, + registry: alternateRegistry, + basic, + }) + registry.nock.put(`/${spec.escapedName}`).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'new package version') +}) - const Publish = t.mock('../../../lib/commands/publish.js', { - '@npmcli/run-script': () => { - t.fail('should not call run-script') +t.test('bare _auth 
config scoped registry', async t => { + const { npm } = await loadMockNpm(t, { + config: { + '@npm:registry': alternateRegistry, + _auth: basic, }, - '../../../lib/utils/tar.js': { - getContents: () => ({ - id: 'someid', - }), - logTar: () => { - t.pass('logTar is called') - }, + prefixDir: { + 'package.json': JSON.stringify({ + name: '@npm/test-package', + version: '1.0.0', + }, null, 2), }, - libnpmpublish: { - publish: () => { - t.pass('publish called') - }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), + }) + await t.rejects( + npm.exec('publish', []), + { message: `This command requires you to be logged in to ${alternateRegistry}` } + ) +}) + +t.test('scoped _auth config scoped registry', async t => { + const spec = npa('@npm/test-package') + const { npm, joinedOutput } = await loadMockNpm(t, { + config: { + '@npm:registry': alternateRegistry, + [`${alternateRegistry.slice(6)}/:_auth`]: basic, }, + prefixDir: { + 'package.json': JSON.stringify({ + name: '@npm/test-package', + version: '1.0.0', + }, null, 2), + }, + globals: ({ prefix }) => ({ + 'process.cwd': () => prefix, + }), }) - const npm = mockNpm({ - config: { 'ignore-scripts': true, loglevel: 'info' }, - output: () => { - t.pass('output is called') - }, - }, t) - npm.config.getCredentialsByURI = uri => { - t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') - return { token: 'some.registry.token' } - } - const publish = new Publish(npm) - await publish.exec([testDir]) + const registry = new MockRegistry({ + tap: t, + registry: alternateRegistry, + basic, + }) + registry.nock.put(`/${spec.escapedName}`).reply(200, {}) + await npm.exec('publish', []) + t.matchSnapshot(joinedOutput(), 'new package version') }) diff --git a/deps/npm/test/lib/commands/unpublish.js b/deps/npm/test/lib/commands/unpublish.js index 829d41c5bb875c..28f93ea3e77a45 100644 --- a/deps/npm/test/lib/commands/unpublish.js +++ b/deps/npm/test/lib/commands/unpublish.js @@ -423,8 +423,8 @@ t.test('completion', async t => { packuments: ['1.0.0', '1.0.1'], }) await registry.package({ manifest, query: { write: true } }) - registry.nock.get('/-/whoami').reply(200, { username: user }) - .get('/-/org/test-user/package?format=cli').reply(200, { [pkg]: 'write' }) + registry.whoami({ username: user }) + registry.nock.get('/-/org/test-user/package?format=cli').reply(200, { [pkg]: 'write' }) await testComp(t, { argv: ['npm', 'unpublish'], @@ -445,8 +445,8 @@ t.test('completion', async t => { const manifest = registry.manifest({ name: pkg }) manifest.versions = {} await registry.package({ manifest, query: { write: true } }) - registry.nock.get('/-/whoami').reply(200, { username: user }) - .get('/-/org/test-user/package?format=cli').reply(200, { [pkg]: 'write' }) + registry.whoami({ username: user }) + registry.nock.get('/-/org/test-user/package?format=cli').reply(200, { [pkg]: 'write' }) await testComp(t, { argv: ['npm', 'unpublish'], @@ -464,12 +464,12 @@ t.test('completion', async t => { registry: npm.config.get('registry'), authorization: 'test-auth-token', }) - registry.nock.get('/-/whoami').reply(200, { username: user }) - .get('/-/org/test-user/package?format=cli').reply(200, { - [pkg]: 'write', - [`${pkg}a`]: 'write', - [`${pkg}b`]: 'write', - }) + registry.whoami({ username: user }) + registry.nock.get('/-/org/test-user/package?format=cli').reply(200, { + [pkg]: 'write', + [`${pkg}a`]: 'write', + [`${pkg}b`]: 'write', + }) await testComp(t, { argv: ['npm', 'unpublish'], @@ -488,7 +488,7 @@ t.test('completion', async 
t => { registry: npm.config.get('registry'), authorization: 'test-auth-token', }) - registry.nock.get('/-/whoami').reply(200, { username: user }) + registry.whoami({ username: user }) registry.nock.get('/-/org/test-user/package?format=cli').reply(200, {}) await testComp(t, { @@ -505,11 +505,11 @@ t.test('completion', async t => { registry: npm.config.get('registry'), authorization: 'test-auth-token', }) - registry.nock.get('/-/whoami').reply(200, { username: user }) - .get('/-/org/test-user/package?format=cli').reply(200, { - [pkg]: 'write', - [`${pkg}a`]: 'write', - }) + registry.whoami({ username: user }) + registry.nock.get('/-/org/test-user/package?format=cli').reply(200, { + [pkg]: 'write', + [`${pkg}a`]: 'write', + }) await testComp(t, { argv: ['npm', 'unpublish'], @@ -525,8 +525,8 @@ t.test('completion', async t => { registry: npm.config.get('registry'), authorization: 'test-auth-token', }) - registry.nock.get('/-/whoami').reply(200, { username: user }) - .get('/-/org/test-user/package?format=cli').reply(200, null) + registry.whoami({ username: user }) + registry.nock.get('/-/org/test-user/package?format=cli').reply(200, null) await testComp(t, { argv: ['npm', 'unpublish'], @@ -542,7 +542,7 @@ t.test('completion', async t => { registry: npm.config.get('registry'), authorization: 'test-auth-token', }) - registry.nock.get('/-/whoami').reply(404) + registry.whoami({ responseCode: 404 }) await testComp(t, { argv: ['npm', 'unpublish'], diff --git a/deps/npm/test/lib/utils/config/definitions.js b/deps/npm/test/lib/utils/config/definitions.js index b387835df55a34..088d0cdb6e1288 100644 --- a/deps/npm/test/lib/utils/config/definitions.js +++ b/deps/npm/test/lib/utils/config/definitions.js @@ -375,6 +375,8 @@ t.test('color', t => { t.strictSame(flat, { color: false, logColor: false }, 'true when --no-color') setTTY('stdout', false) + setTTY('stderr', false) + obj.color = true definitions.color.flatten('color', obj, flat) t.strictSame(flat, { color: false, logColor: false }, 'no color when stdout not tty') @@ -383,7 +385,6 @@ t.test('color', t => { t.strictSame(flat, { color: true, logColor: false }, '--color turns on color when stdout is tty') setTTY('stdout', false) - setTTY('stderr', false) obj.color = true definitions.color.flatten('color', obj, flat) t.strictSame(flat, { color: false, logColor: false }, 'no color when stderr not tty') diff --git a/deps/npm/test/lib/utils/read-package-name.js b/deps/npm/test/lib/utils/read-package-name.js deleted file mode 100644 index a1a1b4a1504dce..00000000000000 --- a/deps/npm/test/lib/utils/read-package-name.js +++ /dev/null @@ -1,33 +0,0 @@ -const t = require('tap') - -const readPackageName = require('../../../lib/utils/read-package-name.js') - -t.test('read local package.json', async (t) => { - const prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: 'my-local-package', - version: '1.0.0', - }), - }) - const packageName = await readPackageName(prefix) - t.equal( - packageName, - 'my-local-package', - 'should retrieve current package name' - ) -}) - -t.test('read local scoped-package.json', async (t) => { - const prefix = t.testdir({ - 'package.json': JSON.stringify({ - name: '@my-scope/my-local-package', - version: '1.0.0', - }), - }) - const packageName = await readPackageName(prefix) - t.equal( - packageName, - '@my-scope/my-local-package', - 'should retrieve scoped package name' - ) -})