diff --git a/.github/workflows/create-cli-deps-pr.yml b/.github/workflows/create-cli-deps-pr.yml index b8508388ce6ab..a59302ebeb0ac 100644 --- a/.github/workflows/create-cli-deps-pr.yml +++ b/.github/workflows/create-cli-deps-pr.yml @@ -52,7 +52,7 @@ jobs: base_branch="v14.x-staging" fi - git config user.name "npm-robot" + git config user.name "npm team" git config user.email "ops+robot@npmjs.com" git checkout -b "npm-$npm_tag" diff --git a/AUTHORS b/AUTHORS index e8ac394779953..9474540d33bae 100644 --- a/AUTHORS +++ b/AUTHORS @@ -786,3 +786,5 @@ Daniel Park Luke Karrys Ivan Aluneed <31174087+aluneed@users.noreply.github.com> +relrelb +Cameron Tacklind diff --git a/CHANGELOG.md b/CHANGELOG.md index f5ebb11ee8124..5154c16124a77 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,89 @@ +## v7.20.0 (2021-07-15) + +### FEATURES + +* [`f17aca5cd`](https://github.com/npm/cli/commit/f17aca5cdf355aaa7e1f517d1b3bb4213f4df092) + [#3487](https://github.com/npm/cli/issues/3487) + feat: add `npm pkg` command + ([@ruyadorno](https://github.com/ruyadorno)) +* [`98905ae37`](https://github.com/npm/cli/commit/98905ae3759165cd6d6f6306f31acc6a2baa4cde) + [#3471](https://github.com/npm/cli/issues/3471) + feat(config): introduce `location` parameter + ([@nlf](https://github.com/nlf)) + +### BUG FIXES + +* [`4755b0728`](https://github.com/npm/cli/commit/4755b072877f547585cb0e2562261b2c87e2ff0b) + [#3498](https://github.com/npm/cli/issues/3498) + friendlier errors for `ERR_SOCKET_TIMEOUT` + ([@nlf](https://github.com/nlf)) +* [`3ecf19cdc`](https://github.com/npm/cli/commit/3ecf19cdc35684ccb15280b2c34d27496aa1c634) + [#3508](https://github.com/npm/cli/issues/3508) + fix(config): fix noproxy + ([@wraithgar](https://github.com/wraithgar)) +* [`c3bd10e46`](https://github.com/npm/cli/commit/c3bd10e461976a073e6a898c46f8bde28b17668f) + [#3499](https://github.com/npm/cli/issues/3499) + fix(update-notifier): don't force black background + ([@wraithgar](https://github.com/wraithgar)) +* [`89483e888`](https://github.com/npm/cli/commit/89483e888acc56386b9ebc4d70a4676e4a5a5cb1) + [#3497](https://github.com/npm/cli/issues/3497) + fix(usage): better audit/boolean flag usage output + ([@wraithgar](https://github.com/wraithgar)) +* [`feeb8e42a`](https://github.com/npm/cli/commit/feeb8e42a7b0510023175dc86269edb544d97601) + [#3495](https://github.com/npm/cli/issues/3495) + fix(publish): obey --ignore-scripts flag + ([@wraithgar](https://github.com/wraithgar)) +* [`103c8c3ef`](https://github.com/npm/cli/commit/103c8c3ef3ba7ff0483557f32eebc4c6298285e3) + [#3479](https://github.com/npm/cli/issues/3479) + chore(exit): log any un-ended timings + ([@wraithgar](https://github.com/wraithgar)) +* [`efc4313c2`](https://github.com/npm/cli/commit/efc4313c2062ffad22aa24e5198d575a7eb5f20e) + [#3482](https://github.com/npm/cli/issues/3482) + chore(refactor): refactor exit handler and tests + ([@wraithgar](https://github.com/wraithgar)) +* [`d8eb49b70`](https://github.com/npm/cli/commit/d8eb49b705acb50b6bed971bfcce4db6e18e73dd) + [#3540](https://github.com/npm/cli/issues/3540) + fix(bundle-and-ignore): case sensitivity cleanup + ([@wraithgar](https://github.com/wraithgar)) + +### DOCUMENTATION + +* [`339145f64`](https://github.com/npm/cli/commit/339145f64f82d540dbc72ef97b54ae20c34315dd) + [#3491](https://github.com/npm/cli/issues/3491) + fix(docs): clarify what install type gets `.bin` + ([@wraithgar](https://github.com/wraithgar)) +* [`74c99755e`](https://github.com/npm/cli/commit/74c99755e522f9cfc0d602841568d5e1f835fcaf) + 
[#3494](https://github.com/npm/cli/issues/3494) + fix(docs): add npm update example + ([@wraithgar](https://github.com/wraithgar)) +* [`801a52330`](https://github.com/npm/cli/commit/801a52330636008fecadc812916c76fb945ce1f6) + [#3542](https://github.com/npm/cli/issues/3542) + fix(docs): correct Node.js JavaScript stylings + ([@relrelb](https://github.com/relrelb)) +* [`791416713`](https://github.com/npm/cli/commit/791416713d64c072d73bffbab2daf7b8eb3c4868) + [#3546](https://github.com/npm/cli/issues/3546) + fix(docs): how to see background script output + ([@cinderblock](https://github.com/cinderblock)) + +### DEPENDENCIES + +* [`691816f3d`](https://github.com/npm/cli/commit/691816f3de2a679152644a60f3e2c5962df6a81d) + `@npmcli/arborist@2.7.1` + * fixes running prepare scripts for workspaces on reify + * ensure pacote always compares correct integrity values +* [`b9597e944`](https://github.com/npm/cli/commit/b9597e944377e74907607ee280ec1e8c31dd3156) + `make-fetch-happen@9.0.4` + * fix: retry socket timeout failures + * fix: clean up invalid indexes and content after cacache read errors +* [`f573e7c56`](https://github.com/npm/cli/commit/f573e7c56e8505fd6dcc3e5f5b5be401d0a45b58) + `minipass-fetch@1.3.4` + * fix: correctly handle error events that happen after response events +* [`2d5797ea0`](https://github.com/npm/cli/commit/2d5797ea01e17b1559d792613446e1435e588a35) + `pacote@11.3.5` + * fix: show more actionable messages for git pathspec errors + * fix: include all dep types when building for prepare + * fix: do not set mtime when unpacking + ## v7.19.1 (2021-07-01) ### BUG FIXES @@ -2334,7 +2420,7 @@ fix(lib/npm): do not clobber config.execPath fix: patch `config.js` to remove duplicate vals ([@darcyclarke](https://github.com/darcyclarke)) -### DOCUMENTION +### DOCUMENTATION * [`60769d757`](https://github.com/npm/cli/commit/60769d757859c88e2cceab66975f182a47822816) [#1911](https://github.com/npm/cli/pull/1911) docs: v7 npm-install diff --git a/docs/content/commands/npm-audit.md b/docs/content/commands/npm-audit.md index 704d7a15fb8f1..94b16b27bd7ed 100644 --- a/docs/content/commands/npm-audit.md +++ b/docs/content/commands/npm-audit.md @@ -232,6 +232,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! @@ -243,6 +244,9 @@ recommended that you do not use this option! Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `package-lock-only` diff --git a/docs/content/commands/npm-ci.md b/docs/content/commands/npm-ci.md index 9645bae7e2f43..31c92b13c5cdd 100644 --- a/docs/content/commands/npm-ci.md +++ b/docs/content/commands/npm-ci.md @@ -69,6 +69,16 @@ cache: +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. 
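For example, a single `npm ci` run can opt out of submitting a report by negating the flag (boolean configs accept the `--no-` prefix):

```bash
# install strictly from the lockfile, without sending an audit report
npm ci --no-audit
```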
+ #### `ignore-scripts` * Default: false diff --git a/docs/content/commands/npm-config.md b/docs/content/commands/npm-config.md index f2868cb8909a9..9e76a23671e86 100644 --- a/docs/content/commands/npm-config.md +++ b/docs/content/commands/npm-config.md @@ -104,6 +104,9 @@ global config. Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `global` @@ -128,6 +131,14 @@ folder instead of the current working directory. See The command to run for `npm edit` and `npm config edit`. +#### `location` + +* Default: "user" unless `--global` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" + +When passed to `npm config` this refers to which config file to use. + #### `long` * Default: false diff --git a/docs/content/commands/npm-dedupe.md b/docs/content/commands/npm-dedupe.md index fbccc41053292..324e6a71b7a3e 100644 --- a/docs/content/commands/npm-dedupe.md +++ b/docs/content/commands/npm-dedupe.md @@ -164,9 +164,10 @@ will *not* run any pre- or post-scripts. * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation for -[`npm audit`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. #### `bin-links` diff --git a/docs/content/commands/npm-explain.md b/docs/content/commands/npm-explain.md index 0e50d7ae43343..3a87ee8e438ba 100644 --- a/docs/content/commands/npm-explain.md +++ b/docs/content/commands/npm-explain.md @@ -63,6 +63,9 @@ node_modules/nyc/node_modules/find-up Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `workspace` diff --git a/docs/content/commands/npm-find-dupes.md b/docs/content/commands/npm-find-dupes.md index 28281d5678ab7..3b28f6443decd 100644 --- a/docs/content/commands/npm-find-dupes.md +++ b/docs/content/commands/npm-find-dupes.md @@ -107,9 +107,10 @@ will *not* run any pre- or post-scripts. * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation for -[`npm audit`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. #### `bin-links` diff --git a/docs/content/commands/npm-fund.md b/docs/content/commands/npm-fund.md index 3dc5292b490a1..ec5f5a37fdb71 100644 --- a/docs/content/commands/npm-fund.md +++ b/docs/content/commands/npm-fund.md @@ -73,6 +73,9 @@ test-workspaces-fund@1.0.0 Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. 
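As a short illustration of that `npm pkg set` behavior:

```bash
npm pkg set tap.timeout=60          # saved as the string "60"
npm pkg set tap.timeout=60 --json   # parsed first, saved as the number 60
```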
#### `browser` diff --git a/docs/content/commands/npm-init.md b/docs/content/commands/npm-init.md index 23e8e70d9e913..54c3bdb4b74ab 100644 --- a/docs/content/commands/npm-init.md +++ b/docs/content/commands/npm-init.md @@ -175,6 +175,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! diff --git a/docs/content/commands/npm-install-ci-test.md b/docs/content/commands/npm-install-ci-test.md index c337905a0566e..2640311cf94be 100644 --- a/docs/content/commands/npm-install-ci-test.md +++ b/docs/content/commands/npm-install-ci-test.md @@ -20,6 +20,16 @@ This command runs `npm ci` followed immediately by `npm test`. +#### `audit` + +* Default: true +* Type: Boolean + +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. + #### `ignore-scripts` * Default: false diff --git a/docs/content/commands/npm-install-test.md b/docs/content/commands/npm-install-test.md index deefbd96b52fd..c8533cafedd7a 100644 --- a/docs/content/commands/npm-install-test.md +++ b/docs/content/commands/npm-install-test.md @@ -149,9 +149,10 @@ will *not* run any pre- or post-scripts. * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation for -[`npm audit`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. #### `bin-links` diff --git a/docs/content/commands/npm-install.md b/docs/content/commands/npm-install.md index e5091e6604c91..70d4c0d46ffeb 100644 --- a/docs/content/commands/npm-install.md +++ b/docs/content/commands/npm-install.md @@ -533,9 +533,10 @@ will *not* run any pre- or post-scripts. * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation for -[`npm audit`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. #### `bin-links` diff --git a/docs/content/commands/npm-link.md b/docs/content/commands/npm-link.md index b1c6066768a99..c7b385009519a 100644 --- a/docs/content/commands/npm-link.md +++ b/docs/content/commands/npm-link.md @@ -233,9 +233,10 @@ will *not* run any pre- or post-scripts. * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation for -[`npm audit`](/commands/npm-audit) for details on what is submitted. 
+When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. #### `bin-links` diff --git a/docs/content/commands/npm-ls.md b/docs/content/commands/npm-ls.md index 1f401fa956ff8..350f40a9991e5 100644 --- a/docs/content/commands/npm-ls.md +++ b/docs/content/commands/npm-ls.md @@ -91,6 +91,9 @@ upon by the current project. Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `long` diff --git a/docs/content/commands/npm-org.md b/docs/content/commands/npm-org.md index e6df560acfba3..269f5cc3ee5b8 100644 --- a/docs/content/commands/npm-org.md +++ b/docs/content/commands/npm-org.md @@ -87,6 +87,9 @@ password, npm will prompt on the command line for one. Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `parseable` diff --git a/docs/content/commands/npm-outdated.md b/docs/content/commands/npm-outdated.md index bc9263d7aeda7..40e5feafd4cc6 100644 --- a/docs/content/commands/npm-outdated.md +++ b/docs/content/commands/npm-outdated.md @@ -104,6 +104,9 @@ upon by the current project. Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `long` diff --git a/docs/content/commands/npm-pack.md b/docs/content/commands/npm-pack.md index 9507026278437..cd4a175919e7e 100644 --- a/docs/content/commands/npm-pack.md +++ b/docs/content/commands/npm-pack.md @@ -34,6 +34,9 @@ Note: This is NOT honored by other network related commands, eg `dist-tags`, Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `pack-destination` diff --git a/docs/content/commands/npm-pkg.md b/docs/content/commands/npm-pkg.md new file mode 100644 index 0000000000000..78b13cf9e9a00 --- /dev/null +++ b/docs/content/commands/npm-pkg.md @@ -0,0 +1,245 @@ +--- +title: npm-pkg +section: 1 +description: Manages your package.json +--- + +### Synopsis + +```bash +npm pkg get [ [. ...]] +npm pkg set = [.= ...] +npm pkg delete [. ...] +``` + +### Description + +A command that automates the management of `package.json` files. +`npm pkg` provides 3 different subcommands that allow you to modify or retrieve +values for given object keys in your `package.json`. + +The syntax to retrieve and set fields is a dot separated representation of +the nested object properties to be found within your `package.json`. It's the +same notation used in [`npm view`](/commands/npm-view) to retrieve information +from the registry manifest; below you can find more examples on how to use it. + +Returned values are always in **json** format. + +* `npm pkg get ` + + Retrieves a value `key`, defined in your `package.json` file.
+ + For example, in order to retrieve the name of the current package, you + can run: + + ```bash + npm pkg get name + ``` + + It's also possible to retrieve multiple values at once: + + ```bash + npm pkg get name version + ``` + + You can view child fields by separating them with a period. To retrieve + the value of a test `script` value, you would run the following command: + + ```bash + npm pkg get scripts.test + ``` + + For fields that are arrays, requesting a non-numeric field will return + all of the values from the objects in the list. For example, to get all + the contributor emails for a package, you would run: + + ```bash + npm pkg get contributors.email + ``` + + You may also use numeric indices in square braces to specifically select + an item in an array field. To just get the email address of the first + contributor in the list, you can run: + + ```bash + npm pkg get contributors[0].email + ``` + +* `npm pkg set =` + + Sets a `value` in your `package.json` based on the `field` value. When + saving to your `package.json` file the same set of rules used during + `npm install` and other cli commands that touches the `package.json` file + are used, making sure to respect the existing indentation and possibly + applying some validation prior to saving values to the file. + + The same syntax used to retrieve values from your package can also be used + to define new properties or overriding existing ones, below are some + examples of how the dot separated syntax can be used to edit your + `package.json` file. + + Defining a new bin named `mynewcommand` in your `package.json` that points + to a file `cli.js`: + + ```bash + npm pkg set bin.mynewcommand=cli.js + ``` + + Setting multiple fields at once is also possible: + + ```bash + npm pkg set description='Awesome package' engines.node='>=10' + ``` + + It's also possible to add to array values, for example to add a new + contributor entry: + + ```bash + npm pkg set contributors[0].name='Foo' contributors[0].email='foo@bar.ca' + ``` + + You may also append items to the end of an array using the special + empty bracket notation: + + ```bash + npm pkg set contributors[].name='Foo' contributors[].name='Bar' + ``` + + It's also possible to parse values as json prior to saving them to your + `package.json` file, for example in order to set a `"private": true` + property: + + ```bash + npm pkg set private=true --json + ``` + + It also enables saving values as numbers: + + ```bash + npm pkg set tap.timeout=60 --json + ``` + +* `npm pkg delete ` + + Deletes a `key` from your `package.json` + + The same syntax used to set values from your package can also be used + to remove existing ones. For example, in order to remove a script named + build: + + ```bash + npm pkg delete scripts.build + ``` + +### Workspaces support + +You can set/get/delete items across your configured workspaces by using the +`workspace` or `workspaces` config options. 
+ +For example, setting a `funding` value across all configured workspaces +of a project: + +```bash +npm pkg set funding=https://example.com --ws +``` + +When using `npm pkg get` to retrieve info from your configured workspaces, the +returned result will be in a json format in which top level keys are the +names of each workspace, the values of these keys will be the result values +returned from each of the configured workspaces, e.g: + +``` +npm pkg get name version --ws +{ + "a": { + "name": "a", + "version": "1.0.0" + }, + "b": { + "name": "b", + "version": "1.0.0" + } +} +``` + +### Configuration + + + +#### `force` + +* Default: false +* Type: Boolean + +Removes various protections against unfortunate side effects, common +mistakes, unnecessary performance degradation, and malicious input. + +* Allow clobbering non-npm files in global installs. +* Allow the `npm version` command to work on an unclean git repository. +* Allow deleting the cache folder with `npm cache clean`. +* Allow installing packages that have an `engines` declaration requiring a + different version of npm. +* Allow installing packages that have an `engines` declaration requiring a + different version of `node`, even if `--engine-strict` is enabled. +* Allow `npm audit fix` to install modules outside your stated dependency + range (including SemVer-major changes). +* Allow unpublishing all versions of a published package. +* Allow conflicting peerDependencies to be installed in the root project. +* Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` + +If you don't have a clear idea of what you want to do, it is strongly +recommended that you do not use this option! + +#### `json` + +* Default: false +* Type: Boolean + +Whether or not to output JSON data, rather than the normal output. + +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + +Not supported by all npm commands. + +#### `workspace` + +* Default: +* Type: String (can be set multiple times) + +Enable running a command in the context of the configured workspaces of the +current project while filtering by running only the workspaces defined by +this configuration option. + +Valid values for the `workspace` config are either: + +* Workspace names +* Path to a workspace directory +* Path to a parent workspace directory (will result to selecting all of the + nested workspaces) + +When set for the `npm init` command, this may be set to the folder of a +workspace which does not yet exist, to create the folder and set it up as a +brand new workspace within the project. + +This value is not exported to the environment for child processes. + +#### `workspaces` + +* Default: false +* Type: Boolean + +Enable running a command in the context of **all** the configured +workspaces. + +This value is not exported to the environment for child processes. + + +## See Also + +* [npm install](/commands/npm-install) +* [npm init](/commands/npm-init) +* [npm config](/commands/npm-config) +* [npm set-script](/commands/npm-set-script) +* [workspaces](/using-npm/workspaces) diff --git a/docs/content/commands/npm-profile.md b/docs/content/commands/npm-profile.md index 63aa46540d32d..079440d785815 100644 --- a/docs/content/commands/npm-profile.md +++ b/docs/content/commands/npm-profile.md @@ -91,6 +91,9 @@ The base URL of the npm registry. Whether or not to output JSON data, rather than the normal output. 
+* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `parseable` diff --git a/docs/content/commands/npm-prune.md b/docs/content/commands/npm-prune.md index ecb6bdcd6cb14..d9b5b068f7a4b 100644 --- a/docs/content/commands/npm-prune.md +++ b/docs/content/commands/npm-prune.md @@ -75,6 +75,9 @@ Note: This is NOT honored by other network related commands, eg `dist-tags`, Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `workspace` diff --git a/docs/content/commands/npm-search.md b/docs/content/commands/npm-search.md index 08c955e64b555..e30287635b56f 100644 --- a/docs/content/commands/npm-search.md +++ b/docs/content/commands/npm-search.md @@ -55,6 +55,9 @@ Show extended information in `ls`, `search`, and `help-search`. Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `color` diff --git a/docs/content/commands/npm-team.md b/docs/content/commands/npm-team.md index 31b09c7ce22b7..c7d5defcc63c1 100644 --- a/docs/content/commands/npm-team.md +++ b/docs/content/commands/npm-team.md @@ -138,6 +138,9 @@ Output parseable results from commands that write to standard output. For Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. diff --git a/docs/content/commands/npm-unpublish.md b/docs/content/commands/npm-unpublish.md index bc8fbc7a53b3d..82779ab65938c 100644 --- a/docs/content/commands/npm-unpublish.md +++ b/docs/content/commands/npm-unpublish.md @@ -82,6 +82,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! diff --git a/docs/content/commands/npm-update.md b/docs/content/commands/npm-update.md index 4c9271c6633c3..c4f7694e19a81 100644 --- a/docs/content/commands/npm-update.md +++ b/docs/content/commands/npm-update.md @@ -15,7 +15,9 @@ aliases: up, upgrade ### Description This command will update all the packages listed to the latest version -(specified by the `tag` config), respecting semver. +(specified by the `tag` config), respecting the semver constraints of +both your package and its dependencies (if they also require the same +package). It will also install missing packages. 
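For instance, using the `dep1` example developed in the sections below, an update never leaves the declared range:

```bash
# package.json declares "dep1": "^0.4.0"; 0.4.1 is the newest matching release
npm update dep1   # installs dep1@0.4.1; a hypothetical 0.5.0 would not be picked up
```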
@@ -101,6 +103,39 @@ Then `npm update` will install `dep1@0.4.1`, because that is the highest-sorting version that satisfies `^0.4.0` (`>= 0.4.0 <0.5.0`) +#### Subdependencies + +Suppose your app now also has a dependency on `dep2` + +```json +{ + "name": "my-app", + "dependencies": { + "dep1": "^1.0.0", + "dep2": "1.0.0" + } +} +``` + +and `dep2` itself depends on this limited range of `dep1` + +```json +{ +"name": "dep2", + "dependencies": { + "dep1": "~1.1.1" + } +} +``` + +Then `npm update` will install `dep1@1.1.2` because that is the highest +version that `dep2` allows. npm will prioritize having a single version +of `dep1` in your tree rather than two when that single version can +satisfy the semver requirements of multiple dependencies in your tree. +In this case if you really did need your package to use a newer version +you would need to use `npm install`. + + #### Updating Globally-Installed Packages `npm update -g` will apply the `update` action to each globally installed @@ -220,9 +255,10 @@ will *not* run any pre- or post-scripts. * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation for -[`npm audit`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. #### `bin-links` diff --git a/docs/content/commands/npm-version.md b/docs/content/commands/npm-version.md index d24207d1e7e34..a3e34153a06da 100644 --- a/docs/content/commands/npm-version.md +++ b/docs/content/commands/npm-version.md @@ -47,6 +47,9 @@ Tag the commit when using the `npm version` command. Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `preid` diff --git a/docs/content/commands/npm-view.md b/docs/content/commands/npm-view.md index 8cbd3267b4bf0..b3d5df86e34a4 100644 --- a/docs/content/commands/npm-view.md +++ b/docs/content/commands/npm-view.md @@ -49,7 +49,7 @@ npm view opts@$(npm view ronn dependencies.opts) For fields that are arrays, requesting a non-numeric field will return all of the values from the objects in the list. For example, to get all -the contributor names for the `express` package, you would run: +the contributor email addresses for the `express` package, you would run: ```bash npm view express contributors.email @@ -105,6 +105,9 @@ npm view connect versions Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `workspace` diff --git a/docs/content/configuring-npm/folders.md b/docs/content/configuring-npm/folders.md index 75e31cd733161..218870765b262 100644 --- a/docs/content/configuring-npm/folders.md +++ b/docs/content/configuring-npm/folders.md @@ -45,14 +45,16 @@ Global installs on Windows go to `{prefix}/node_modules` (that is, no Scoped packages are installed the same way, except they are grouped together in a sub-folder of the relevant `node_modules` folder with the name of that scope prefix by the @ symbol, e.g. 
`npm install @myorg/package` would place -the package in `{prefix}/node_modules/@myorg/package`. See [`scope`](/using-npm/scope) for more details. +the package in `{prefix}/node_modules/@myorg/package`. See +[`scope`](/using-npm/scope) for more details. If you wish to `require()` a package, then install it locally. #### Executables When in global mode, executables are linked into `{prefix}/bin` on Unix, -or directly into `{prefix}` on Windows. +or directly into `{prefix}` on Windows. Ensure that path is in your +terminal's `PATH` environment to run them. When in local mode, executables are linked into `./node_modules/.bin` so that they can be made available to scripts run diff --git a/docs/content/configuring-npm/package-json.md b/docs/content/configuring-npm/package-json.md index 5cacf68ba175f..0fc5dc5075ee3 100644 --- a/docs/content/configuring-npm/package-json.md +++ b/docs/content/configuring-npm/package-json.md @@ -341,9 +341,12 @@ install into the PATH. npm makes this pretty easy (in fact, it uses this feature to install the "npm" executable.) To use this, supply a `bin` field in your package.json which is a map of -command name to local file name. On install, npm will symlink that file -into `prefix/bin` for global installs, or `./node_modules/.bin/` for local -installs. +command name to local file name. When this package is installed +globally, that file will be linked where global bins go so it is +available to run by name. When this package is installed as a +dependency in another package, the file will be linked where it will be +available to that package either directly by `npm exec` or by name in other +scripts when invoking them via `npm run-script`. For example, myapp could have this: @@ -388,6 +391,9 @@ executable! Note that you can also set the executable files using [directories.bin](#directoriesbin). +See [folders](/configuring-npm/folders#executables) for more info on +executables. + ### man Specify either a single file or an array of filenames to put in place for diff --git a/docs/content/using-npm/config.md b/docs/content/using-npm/config.md index 1036895101fcc..c4d1afed35cc8 100644 --- a/docs/content/using-npm/config.md +++ b/docs/content/using-npm/config.md @@ -67,6 +67,7 @@ The following shorthands are parsed on the command-line: * `--desc`: `--description` * `-f`: `--force` * `-g`: `--global` +* `-L`: `--location` * `-d`: `--loglevel info` * `-s`: `--loglevel silent` * `--silent`: `--loglevel silent` @@ -170,9 +171,10 @@ to the same value as the current version. * Default: true * Type: Boolean -When "true" submit audit reports alongside `npm install` runs to the default -registry and all registries configured for scopes. See the documentation for -[`npm audit`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [`npm audit`](/commands/npm-audit) for details on what is +submitted. #### `audit-level` @@ -495,6 +497,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set `--yes` during `npm init`. +* Allow clobbering existing values in `npm pkg` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! @@ -694,6 +697,9 @@ number, if not already set in package.json. 
Whether or not to output JSON data, rather than the normal output. +* In `npm pkg set` it enables parsing set values with JSON.parse() before + saving them to your `package.json`. + Not supported by all npm commands. #### `key` @@ -753,6 +759,14 @@ Used with `npm ls`, limiting output to only those packages that are linked. The IP address of the local interface to use when making connections to the npm registry. Must be IPv4 in versions of Node prior to 0.12. +#### `location` + +* Default: "user" unless `--global` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" + +When passed to `npm config` this refers to which config file to use. + #### `loglevel` * Default: "notice" diff --git a/docs/content/using-npm/scripts.md b/docs/content/using-npm/scripts.md index 8df9660ca8f4c..8fd5c5c0dbc9d 100644 --- a/docs/content/using-npm/scripts.md +++ b/docs/content/using-npm/scripts.md @@ -54,7 +54,8 @@ situations. These scripts happen in addition to the `pre`, `post`, the prepare script will be run, before the package is packaged and installed. -* As of `npm@7` these scripts run in the background +* As of `npm@7` these scripts run in the background. + To see the output, run with: `--foreground-scripts`. **prepublish** (DEPRECATED) * Does not run during `npm publish`, but does run during `npm ci` @@ -303,8 +304,8 @@ Scripts are run by passing the line as a script argument to `sh`. If the script exits with a code other than 0, then this will abort the process. -Note that these script files don't have to be nodejs or even -javascript programs. They just have to be some kind of executable +Note that these script files don't have to be Node.js or even +JavaScript programs. They just have to be some kind of executable file. ### Best Practices diff --git a/docs/content/using-npm/workspaces.md b/docs/content/using-npm/workspaces.md index 829168864abe4..7cc125b3c7a7c 100644 --- a/docs/content/using-npm/workspaces.md +++ b/docs/content/using-npm/workspaces.md @@ -37,7 +37,7 @@ Workspaces are usually defined via the `workspaces` property of the Given the above `package.json` example living at a current working directory `.` that contains a folder named `workspace-a` that itself contains -a `package.json` inside it, defining a nodejs package, e.g: +a `package.json` inside it, defining a Node.js package, e.g: ``` . diff --git a/docs/nav.yml b/docs/nav.yml index 44e4b2f879370..a45aefbb03d28 100644 --- a/docs/nav.yml +++ b/docs/nav.yml @@ -117,6 +117,9 @@ - title: npm ping url: /commands/npm-ping description: Ping npm registry + - title: npm pkg + url: /commands/npm-pkg + description: Manages your package.json - title: npm prefix url: /commands/npm-prefix description: Display prefix diff --git a/lib/base-command.js b/lib/base-command.js index 4077733a934b0..870c69acc492d 100644 --- a/lib/base-command.js +++ b/lib/base-command.js @@ -75,7 +75,6 @@ class BaseCommand { } async setWorkspaces (filters) { - // TODO npm guards workspaces/global mode so we should use this.npm.prefix? 
const ws = await getWorkspaces(filters, { path: this.npm.localPrefix }) this.workspaces = ws this.workspaceNames = [...ws.keys()] diff --git a/lib/ci.js b/lib/ci.js index 3ff4b65badb49..6634ffcdc19bc 100644 --- a/lib/ci.js +++ b/lib/ci.js @@ -33,6 +33,7 @@ class CI extends ArboristWorkspaceCmd { /* istanbul ignore next - see test/lib/load-all-commands.js */ static get params () { return [ + 'audit', 'ignore-scripts', 'script-shell', ] diff --git a/lib/config.js b/lib/config.js index d1585f414dca8..a56dd92ffbde6 100644 --- a/lib/config.js +++ b/lib/config.js @@ -56,6 +56,7 @@ class Config extends BaseCommand { 'json', 'global', 'editor', + 'location', 'long', ] } @@ -137,7 +138,7 @@ class Config extends BaseCommand { if (!args.length) throw this.usageError() - const where = this.npm.config.get('global') ? 'global' : 'user' + const where = this.npm.config.get('location') for (const [key, val] of Object.entries(keyValues(args))) { this.npm.log.info('config', 'set %j %j', key, val) this.npm.config.set(key, val || '', where) @@ -167,16 +168,15 @@ class Config extends BaseCommand { if (!keys.length) throw this.usageError() - const where = this.npm.config.get('global') ? 'global' : 'user' + const where = this.npm.config.get('location') for (const key of keys) this.npm.config.delete(key, where) await this.npm.config.save(where) } async edit () { - const global = this.npm.config.get('global') const e = this.npm.config.get('editor') - const where = global ? 'global' : 'user' + const where = this.npm.config.get('location') const file = this.npm.config.data.get(where).source // save first, just to make sure it's synced up diff --git a/lib/install-ci-test.js b/lib/install-ci-test.js index 0d408178b33f2..871f24b2f32d6 100644 --- a/lib/install-ci-test.js +++ b/lib/install-ci-test.js @@ -13,14 +13,6 @@ class InstallCITest extends CI { return 'install-ci-test' } - /* istanbul ignore next - see test/lib/load-all-commands.js */ - static get params () { - return [ - 'ignore-scripts', - 'script-shell', - ] - } - exec (args, cb) { this.npm.commands.ci(args, (er) => { if (er) diff --git a/lib/ls.js b/lib/ls.js index 7540692911976..91e9a9dd3dba8 100644 --- a/lib/ls.js +++ b/lib/ls.js @@ -138,7 +138,7 @@ class LS extends ArboristWorkspaceCmd { !(node instanceof Arborist.Node) || (currentDepth > depthToPrint) return (shouldSkipChildren) ? [] - : [...(node.target || node).edgesOut.values()] + : [...(node.target).edgesOut.values()] .filter(filterBySelectedWorkspaces) .filter(filterByEdgesTypes({ currentDepth, diff --git a/lib/npm.js b/lib/npm.js index 7046a84d0bcfa..db3559a384bd7 100644 --- a/lib/npm.js +++ b/lib/npm.js @@ -1,12 +1,7 @@ -// The order of the code in this file is relevant, because a lot of things -// require('npm.js'), but also we need to use some of those modules. So, -// we define and instantiate the singleton ahead of loading any modules -// required for its methods. 
- -// these are all dependencies used in the ctor const EventEmitter = require('events') const { resolve, dirname } = require('path') const Config = require('@npmcli/config') +const log = require('npmlog') // Patch the global fs module here at the app level require('graceful-fs').gracefulify(require('fs')) @@ -37,23 +32,51 @@ const proxyCmds = new Proxy({}, { }, }) +// Timers in progress +const timers = new Map() +// Finished timers +const timings = {} + +const processOnTimeHandler = (name) => { + timers.set(name, Date.now()) +} + +const processOnTimeEndHandler = (name) => { + if (timers.has(name)) { + const ms = Date.now() - timers.get(name) + log.timing(name, `Completed in ${ms}ms`) + timings[name] = ms + timers.delete(name) + } else + log.silly('timing', "Tried to end timer that doesn't exist:", name) +} + const { definitions, flatten, shorthands } = require('./utils/config/index.js') const { shellouts } = require('./utils/cmd-list.js') const usage = require('./utils/npm-usage.js') +const which = require('which') + +const deref = require('./utils/deref-command.js') +const setupLog = require('./utils/setup-log.js') +const cleanUpLogFiles = require('./utils/cleanup-log-files.js') +const getProjectScope = require('./utils/get-project-scope.js') + let warnedNonDashArg = false const _runCmd = Symbol('_runCmd') const _load = Symbol('_load') const _tmpFolder = Symbol('_tmpFolder') const _title = Symbol('_title') + const npm = module.exports = new class extends EventEmitter { constructor () { super() - // TODO make this only ever load once (or unload) in tests - require('./utils/perf.js') this.started = Date.now() this.command = null this.commands = proxyCmds + this.timings = timings + this.timers = timers + this.perfStart() procLogListener() process.emit('time', 'npm') this.version = require('../package.json').version @@ -67,6 +90,16 @@ const npm = module.exports = new class extends EventEmitter { this.updateNotification = null } + perfStart () { + process.on('time', processOnTimeHandler) + process.on('timeEnd', processOnTimeEndHandler) + } + + perfStop () { + process.off('time', processOnTimeHandler) + process.off('timeEnd', processOnTimeEndHandler) + } + get shelloutCommands () { return shellouts } @@ -317,16 +350,5 @@ const npm = module.exports = new class extends EventEmitter { } }() -// now load everything required by the class methods - -const log = require('npmlog') - -const which = require('which') - -const deref = require('./utils/deref-command.js') -const setupLog = require('./utils/setup-log.js') -const cleanUpLogFiles = require('./utils/cleanup-log-files.js') -const getProjectScope = require('./utils/get-project-scope.js') - if (require.main === module) require('./cli.js')(process) diff --git a/lib/outdated.js b/lib/outdated.js index 9d60d143d71ce..01e268fe96aee 100644 --- a/lib/outdated.js +++ b/lib/outdated.js @@ -158,7 +158,7 @@ class Outdated extends ArboristWorkspaceCmd { edge.from && this.filterSet && this.filterSet.size > 0 - && !this.filterSet.has(edge.from.target || edge.from) + && !this.filterSet.has(edge.from.target) if (filteredOut) return diff --git a/lib/pkg.js b/lib/pkg.js new file mode 100644 index 0000000000000..9ba92c930e1f0 --- /dev/null +++ b/lib/pkg.js @@ -0,0 +1,152 @@ +const PackageJson = require('@npmcli/package-json') +const BaseCommand = require('./base-command.js') +const Queryable = require('./utils/queryable.js') + +class Pkg extends BaseCommand { + static get description () { + return 'Manages your package.json' + } + + /* istanbul ignore next - see 
test/lib/load-all-commands.js */ + static get name () { + return 'pkg' + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get usage () { + return [ + 'set = [= ...]', + 'get [ [ ...]]', + 'delete [ ...]', + ] + } + + /* istanbul ignore next - see test/lib/load-all-commands.js */ + static get params () { + return [ + 'force', + 'json', + 'workspace', + 'workspaces', + ] + } + + exec (args, cb) { + this.prefix = this.npm.localPrefix + this.pkg(args).then(() => cb()).catch(cb) + } + + execWorkspaces (args, filters, cb) { + this.pkgWorkspaces(args, filters).then(() => cb()).catch(cb) + } + + async pkg (args) { + if (this.npm.config.get('global')) { + throw Object.assign( + new Error(`There's no package.json file to manage on global mode`), + { code: 'EPKGGLOBAL' } + ) + } + + const [cmd, ..._args] = args + switch (cmd) { + case 'get': + return this.get(_args) + case 'set': + return this.set(_args) + case 'delete': + return this.delete(_args) + default: + throw this.usageError() + } + } + + async pkgWorkspaces (args, filters) { + await this.setWorkspaces(filters) + const result = {} + for (const [workspaceName, workspacePath] of this.workspaces.entries()) { + this.prefix = workspacePath + result[workspaceName] = await this.pkg(args) + } + // when running in workspaces names, make sure to key by workspace + // name the results of each value retrieved in each ws + this.npm.output(JSON.stringify(result, null, 2)) + } + + async get (args) { + const pkgJson = await PackageJson.load(this.prefix) + + const { content } = pkgJson + let result = !args.length && content + + if (!result) { + const q = new Queryable(content) + result = q.query(args) + + // in case there's only a single result from the query + // just prints that one element to stdout + if (Object.keys(result).length === 1) + result = result[args] + } + + // only outputs if not running with workspaces config, + // in case you're retrieving info for workspaces the pkgWorkspaces + // will handle the output to make sure it get keyed by ws name + if (!this.workspaces) + this.npm.output(JSON.stringify(result, null, 2)) + + return result + } + + async set (args) { + const setError = () => + Object.assign( + new TypeError('npm pkg set expects a key=value pair of args.'), + { code: 'EPKGSET' } + ) + + if (!args.length) + throw setError() + + const force = this.npm.config.get('force') + const json = this.npm.config.get('json') + const pkgJson = await PackageJson.load(this.prefix) + const q = new Queryable(pkgJson.content) + for (const arg of args) { + const [key, ...rest] = arg.split('=') + const value = rest.join('=') + if (!key || !value) + throw setError() + + q.set(key, json ? 
JSON.parse(value) : value, { force }) + } + + pkgJson.update(q.toJSON()) + await pkgJson.save() + } + + async delete (args) { + const setError = () => + Object.assign( + new TypeError('npm pkg delete expects key args.'), + { code: 'EPKGDELETE' } + ) + + if (!args.length) + throw setError() + + const pkgJson = await PackageJson.load(this.prefix) + const q = new Queryable(pkgJson.content) + for (const key of args) { + if (!key) + throw setError() + + q.delete(key) + } + + pkgJson.update(q.toJSON()) + await pkgJson.save() + } +} + +module.exports = Pkg diff --git a/lib/publish.js b/lib/publish.js index f35388a30f4ed..9c747eb5068f0 100644 --- a/lib/publish.js +++ b/lib/publish.js @@ -66,6 +66,7 @@ class Publish extends BaseCommand { const dryRun = this.npm.config.get('dry-run') const json = this.npm.config.get('json') const defaultTag = this.npm.config.get('tag') + const ignoreScripts = this.npm.config.get('ignore-scripts') const silent = log.level === 'silent' if (semver.validRange(defaultTag)) @@ -82,7 +83,7 @@ class Publish extends BaseCommand { flatten(manifest.publishConfig, opts) // only run scripts for directory type publishes - if (spec.type === 'directory') { + if (spec.type === 'directory' && !ignoreScripts) { await runScript({ event: 'prepublishOnly', path: spec.fetchSpec, @@ -119,7 +120,7 @@ class Publish extends BaseCommand { await otplease(opts, opts => libpub(manifest, tarballData, opts)) } - if (spec.type === 'directory') { + if (spec.type === 'directory' && !ignoreScripts) { await runScript({ event: 'publish', path: spec.fetchSpec, diff --git a/lib/utils/cmd-list.js b/lib/utils/cmd-list.js index c865cdabb4014..26da539006588 100644 --- a/lib/utils/cmd-list.js +++ b/lib/utils/cmd-list.js @@ -122,6 +122,7 @@ const cmdList = [ 'diff', 'dist-tag', 'ping', + 'pkg', 'test', 'stop', diff --git a/lib/utils/config/definition.js b/lib/utils/config/definition.js index 507be6a644042..1354851326adf 100644 --- a/lib/utils/config/definition.js +++ b/lib/utils/config/definition.js @@ -91,13 +91,27 @@ ${noEnvExport}`) } const describeUsage = (def) => { - let key = `--${def.key}` - if (def.short && typeof def.short === 'string') - key = `-${def.short}|${key}` + let key = '' // Single type - if (!Array.isArray(def.type)) - return `${key}${def.type === Boolean ? 
'' : ' ' + def.hint}` + if (!Array.isArray(def.type)) { + if (def.short) + key = `-${def.short}|` + + if (def.type === Boolean && def.default !== false) + key = `${key}--no-${def.key}` + else + key = `${key}--${def.key}` + + if (def.type !== Boolean) + key = `${key} ${def.hint}` + + return key + } + + key = `--${def.key}` + if (def.short) + key = `-${def.short}|--${def.key}` // Multiple types let types = def.type @@ -120,8 +134,12 @@ const describeUsage = (def) => { description = def.hint } - if (bool) - key = `${key}|${key}` + if (bool) { + // Currently none of our multi-type configs with boolean values default to + // false so all their hints should show `--no-`, if we ever add ones that + // default to false we can branch the logic here + key = `--no-${def.key}|${key}` + } const usage = `${key} ${description}` if (multiple) diff --git a/lib/utils/config/definitions.js b/lib/utils/config/definitions.js index d540b0fc67e82..abe6bda70d8bc 100644 --- a/lib/utils/config/definitions.js +++ b/lib/utils/config/definitions.js @@ -203,10 +203,10 @@ define('audit', { default: true, type: Boolean, description: ` - When "true" submit audit reports alongside \`npm install\` runs to the + When "true" submit audit reports alongside the current npm command to the default registry and all registries configured for scopes. See the - documentation for [\`npm audit\`](/commands/npm-audit) for details on - what is submitted. + documentation for [\`npm audit\`](/commands/npm-audit) for details on what + is submitted. `, flatten, }) @@ -440,6 +440,7 @@ define('cidr', { define('color', { default: !process.env.NO_COLOR || process.env.NO_COLOR === '0', + usage: '--color|--no-color|--color always', defaultDescription: ` true unless the NO_COLOR environ is set to something other than '0' `, @@ -715,6 +716,7 @@ define('force', { * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set \`--yes\` during \`npm init\`. + * Allow clobbering existing values in \`npm pkg\` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! @@ -1029,6 +1031,9 @@ define('json', { description: ` Whether or not to output JSON data, rather than the normal output. + * In \`npm pkg set\` it enables parsing set values with JSON.parse() + before saving them to your \`package.json\`. + Not supported by all npm commands. `, flatten, @@ -1103,6 +1108,31 @@ define('local-address', { flatten, }) +define('location', { + default: 'user', + short: 'L', + type: [ + 'global', + 'user', + 'project', + ], + defaultDescription: ` + "user" unless \`--global\` is passed, which will also set this value to "global" + `, + description: ` + When passed to \`npm config\` this refers to which config file to use. + `, + // NOTE: the flattener here deliberately does not alter the value of global + // for now, this is to avoid inadvertently causing any breakage. the value of + // global, however, does modify this flag. + flatten (key, obj, flatOptions) { + // if global is set, we override ourselves + if (obj.global) + obj.location = 'global' + flatOptions.location = obj.location + }, +}) + define('loglevel', { default: 'notice', type: [ @@ -1200,7 +1230,10 @@ define('noproxy', { Also accepts a comma-delimited string. 
`, flatten (key, obj, flatOptions) { - flatOptions.noProxy = obj[key].join(',') + if (Array.isArray(obj[key])) + flatOptions.noProxy = obj[key].join(',') + else + flatOptions.noProxy = obj[key] }, }) diff --git a/lib/utils/error-message.js b/lib/utils/error-message.js index 125cdf8c53581..3b590f712e783 100644 --- a/lib/utils/error-message.js +++ b/lib/utils/error-message.js @@ -269,6 +269,7 @@ module.exports = (er, npm) => { case 'ECONNRESET': case 'ENOTFOUND': case 'ETIMEDOUT': + case 'ERR_SOCKET_TIMEOUT': case 'EAI_FAIL': short.push(['network', er.message]) detail.push([ diff --git a/lib/utils/exit-handler.js b/lib/utils/exit-handler.js index 931527704b9b7..95c9655a716cf 100644 --- a/lib/utils/exit-handler.js +++ b/lib/utils/exit-handler.js @@ -1,4 +1,3 @@ -const log = require('npmlog') const os = require('os') const path = require('path') const writeFileAtomic = require('write-file-atomic') @@ -13,8 +12,6 @@ let logFileName let npm // set by the cli let wroteLogFile = false -const timings = {} - const getLogFile = () => { // we call this multiple times, so we need to treat it as a singleton because // the date is part of the name @@ -24,17 +21,15 @@ const getLogFile = () => { return logFileName } -process.on('timing', (name, value) => { - if (timings[name]) - timings[name] += value - else - timings[name] = value -}) - process.on('exit', code => { + // process.emit is synchronous, so the timeEnd handler will run before the + // unfinished timer check below process.emit('timeEnd', 'npm') - log.disableProgress() - if (npm.config && npm.config.loaded && npm.config.get('timing')) { + npm.log.disableProgress() + for (const [name, timers] of npm.timers) + npm.log.verbose('unfinished npm timer', name, timers) + + if (npm.config.loaded && npm.config.get('timing')) { try { const file = path.resolve(npm.config.get('cache'), '_timing.json') const dir = path.dirname(npm.config.get('cache')) @@ -44,7 +39,7 @@ process.on('exit', code => { command: process.argv.slice(2), logfile: getLogFile(), version: npm.version, - ...timings, + ...npm.timings, }) + '\n') const st = fs.lstatSync(path.dirname(npm.config.get('cache'))) @@ -56,27 +51,27 @@ process.on('exit', code => { } if (!code) - log.info('ok') + npm.log.info('ok') else { - log.verbose('code', code) + npm.log.verbose('code', code) if (!exitHandlerCalled) { - log.error('', 'Exit handler never called!') + npm.log.error('', 'Exit handler never called!') console.error('') - log.error('', 'This is an error with npm itself. Please report this error at:') - log.error('', ' ') + npm.log.error('', 'This is an error with npm itself. Please report this error at:') + npm.log.error('', ' ') // TODO this doesn't have an npm.config.loaded guard writeLogFile() } } // In timing mode we always write the log file - if (npm.config && npm.config.loaded && npm.config.get('timing') && !wroteLogFile) + if (npm.config.loaded && npm.config.get('timing') && !wroteLogFile) writeLogFile() if (wroteLogFile) { // just a line break - if (log.levels[log.level] <= log.levels.error) + if (npm.log.levels[npm.log.level] <= npm.log.levels.error) console.error('') - log.error( + npm.log.error( '', [ 'A complete log of this run can be found in:', @@ -88,121 +83,114 @@ process.on('exit', code => { // these are needed for the tests to have a clean slate in each test case exitHandlerCalled = false wroteLogFile = false - - // actually exit. 
- process.exit(code) }) -const exit = (code, noLog) => { - log.verbose('exit', code || 0) - if (log.level === 'silent') - noLog = true - - // noLog is true if there was an error, including if config wasn't loaded, so - // this doesn't need a config.loaded guard - if (code && !noLog) - writeLogFile() - - // Exit directly -- nothing in the CLI should still be running in the - // background at this point, and this makes sure anything left dangling - // for whatever reason gets thrown away, instead of leaving the CLI open - process.stdout.write('', () => { - process.exit(code) - }) -} - const exitHandler = (err) => { - log.disableProgress() - if (!npm.config || !npm.config.loaded) { - // logging won't work unless we pretend that it's ready + npm.log.disableProgress() + if (!npm.config.loaded) { err = err || new Error('Exit prior to config file resolving.') console.error(err.stack || err.message) } - if (exitHandlerCalled) - err = err || new Error('Exit handler called more than once.') - - // only show the notification if it finished before the other stuff we - // were doing. no need to hang on `npm -v` or something. + // only show the notification if it finished. if (typeof npm.updateNotification === 'string') { - const { level } = log - log.level = log.levels.notice - log.notice('', npm.updateNotification) - log.level = level + const { level } = npm.log + npm.log.level = 'notice' + npm.log.notice('', npm.updateNotification) + npm.log.level = level } exitHandlerCalled = true - if (!err) - return exit() - - // if we got a command that just shells out to something else, then it - // will presumably print its own errors and exit with a proper status - // code if there's a problem. If we got an error with a code=0, then... - // something else went wrong along the way, so maybe an npm problem? - const isShellout = npm.shelloutCommands.includes(npm.command) - const quietShellout = isShellout && typeof err.code === 'number' && err.code - if (quietShellout) - return exit(err.code, true) - else if (typeof err === 'string') { - log.error('', err) - return exit(1, true) - } else if (!(err instanceof Error)) { - log.error('weird error', err) - return exit(1, true) - } - - if (!err.code) { - const matchErrorCode = err.message.match(/^(?:Error: )?(E[A-Z]+)/) - err.code = matchErrorCode && matchErrorCode[1] - } - for (const k of ['type', 'stack', 'statusCode', 'pkgid']) { - const v = err[k] - if (v) - log.verbose(k, replaceInfo(v)) + let exitCode + let noLog + + if (err) { + exitCode = 1 + // if we got a command that just shells out to something else, then it + // will presumably print its own errors and exit with a proper status + // code if there's a problem. If we got an error with a code=0, then... + // something else went wrong along the way, so maybe an npm problem? 
+ const isShellout = npm.shelloutCommands.includes(npm.command) + const quietShellout = isShellout && typeof err.code === 'number' && err.code + if (quietShellout) { + exitCode = err.code + noLog = true + } else if (typeof err === 'string') { + noLog = true + npm.log.error('', err) + } else if (!(err instanceof Error)) { + noLog = true + npm.log.error('weird error', err) + } else { + if (!err.code) { + const matchErrorCode = err.message.match(/^(?:Error: )?(E[A-Z]+)/) + err.code = matchErrorCode && matchErrorCode[1] + } + + for (const k of ['type', 'stack', 'statusCode', 'pkgid']) { + const v = err[k] + if (v) + npm.log.verbose(k, replaceInfo(v)) + } + + npm.log.verbose('cwd', process.cwd()) + + const args = replaceInfo(process.argv) + npm.log.verbose('', os.type() + ' ' + os.release()) + npm.log.verbose('argv', args.map(JSON.stringify).join(' ')) + npm.log.verbose('node', process.version) + npm.log.verbose('npm ', 'v' + npm.version) + + for (const k of ['code', 'syscall', 'file', 'path', 'dest', 'errno']) { + const v = err[k] + if (v) + npm.log.error(k, v) + } + + const msg = errorMessage(err, npm) + for (const errline of [...msg.summary, ...msg.detail]) + npm.log.error(...errline) + + if (npm.config.loaded && npm.config.get('json')) { + const error = { + error: { + code: err.code, + summary: messageText(msg.summary), + detail: messageText(msg.detail), + }, + } + console.error(JSON.stringify(error, null, 2)) + } + + if (typeof err.errno === 'number') + exitCode = err.errno + else if (typeof err.code === 'number') + exitCode = err.code + } } + npm.log.verbose('exit', exitCode || 0) - log.verbose('cwd', process.cwd()) - - const args = replaceInfo(process.argv) - log.verbose('', os.type() + ' ' + os.release()) - log.verbose('argv', args.map(JSON.stringify).join(' ')) - log.verbose('node', process.version) - log.verbose('npm ', 'v' + npm.version) - - for (const k of ['code', 'syscall', 'file', 'path', 'dest', 'errno']) { - const v = err[k] - if (v) - log.error(k, v) - } + if (npm.log.level === 'silent') + noLog = true - const msg = errorMessage(err, npm) - for (const errline of [...msg.summary, ...msg.detail]) - log.error(...errline) - - if (npm.config && npm.config.get('json')) { - const error = { - error: { - code: err.code, - summary: messageText(msg.summary), - detail: messageText(msg.detail), - }, - } - console.error(JSON.stringify(error, null, 2)) - } + // noLog is true if there was an error, including if config wasn't loaded, so + // this doesn't need a config.loaded guard + if (exitCode && !noLog) + writeLogFile() - exit(typeof err.errno === 'number' ? err.errno : typeof err.code === 'number' ? err.code : 1) + // explicitly call process.exit now so we don't hang on things like the + // update notifier, also flush stdout beforehand because process.exit doesn't + // wait for that to happen. + process.stdout.write('', () => process.exit(exitCode)) } const messageText = msg => msg.map(line => line.slice(1).join(' ')).join('\n') const writeLogFile = () => { - if (wroteLogFile) - return - try { let logOutput = '' - log.record.forEach(m => { + npm.log.record.forEach(m => { const p = [m.id, m.level] if (m.prefix) p.push(m.prefix) @@ -225,7 +213,7 @@ const writeLogFile = () => { fs.chownSync(file, st.uid, st.gid) // truncate once it's been written. 
- log.record.length = 0 + npm.log.record.length = 0 wroteLogFile = true } catch (ex) { diff --git a/lib/utils/perf.js b/lib/utils/perf.js deleted file mode 100644 index 4961054d909ad..0000000000000 --- a/lib/utils/perf.js +++ /dev/null @@ -1,23 +0,0 @@ -const log = require('npmlog') -const timings = new Map() - -process.on('time', (name) => { - timings.set(name, Date.now()) -}) - -process.on('timeEnd', (name) => { - if (timings.has(name)) { - const ms = Date.now() - timings.get(name) - process.emit('timing', name, ms) - log.timing(name, `Completed in ${ms}ms`) - timings.delete(name) - } else - log.silly('timing', "Tried to end timer that doesn't exist:", name) -}) - -// for tests -/* istanbul ignore next */ -exports.reset = () => { - process.removeAllListeners('time') - process.removeAllListeners('timeEnd') -} diff --git a/lib/utils/queryable.js b/lib/utils/queryable.js new file mode 100644 index 0000000000000..e10eba3b5f092 --- /dev/null +++ b/lib/utils/queryable.js @@ -0,0 +1,314 @@ +const util = require('util') +const _data = Symbol('data') +const _delete = Symbol('delete') +const _append = Symbol('append') + +const sqBracketsMatcher = str => str.match(/(.+)\[([^\]]+)\]\.?(.*)$/) + +// replaces any occurence of an empty-brackets (e.g: []) with a special +// Symbol(append) to represent it, this is going to be useful for the setter +// method that will push values to the end of the array when finding these +const replaceAppendSymbols = str => { + const matchEmptyBracket = str.match(/^(.*)\[\]\.?(.*)$/) + + if (matchEmptyBracket) { + const [, pre, post] = matchEmptyBracket + return [...replaceAppendSymbols(pre), _append, post].filter(Boolean) + } + + return [str] +} + +const parseKeys = (key) => { + const sqBracketItems = new Set() + sqBracketItems.add(_append) + const parseSqBrackets = (str) => { + const index = sqBracketsMatcher(str) + + // once we find square brackets, we recursively parse all these + if (index) { + const preSqBracketPortion = index[1] + + // we want to have a `new String` wrapper here in order to differentiate + // between multiple occurences of the same string, e.g: + // foo.bar[foo.bar] should split into { foo: { bar: { 'foo.bar': {} } } + /* eslint-disable-next-line no-new-wrappers */ + const foundKey = new String(index[2]) + const postSqBracketPortion = index[3] + + // we keep track of items found during this step to make sure + // we don't try to split-separate keys that were defined within + // square brackets, since the key name itself might contain dots + sqBracketItems.add(foundKey) + + // returns an array that contains either dot-separate items (that will + // be splitted appart during the next step OR the fully parsed keys + // read from square brackets, e.g: + // foo.bar[1.0.0].a.b -> ['foo.bar', '1.0.0', 'a.b'] + return [ + ...parseSqBrackets(preSqBracketPortion), + foundKey, + ...( + postSqBracketPortion + ? 
parseSqBrackets(postSqBracketPortion) + : [] + ), + ] + } + + // at the end of parsing, any usage of the special empty-bracket syntax + // (e.g: foo.array[]) has not yet been parsed, here we'll take care + // of parsing it and adding a special symbol to represent it in + // the resulting list of keys + return replaceAppendSymbols(str) + } + + const res = [] + // starts by parsing items defined as square brackets, those might be + // representing properties that have a dot in the name or just array + // indexes, e.g: foo[1.0.0] or list[0] + const sqBracketKeys = parseSqBrackets(key.trim()) + + for (const k of sqBracketKeys) { + // keys parsed from square brackets should just be added to list of + // resulting keys as they might have dots as part of the key + if (sqBracketItems.has(k)) + res.push(k) + else { + // splits the dot-sep property names and add them to the list of keys + for (const splitKey of k.split('.')) + /* eslint-disable-next-line no-new-wrappers */ + res.push(new String(splitKey)) + } + } + + // returns an ordered list of strings in which each entry + // represents a key in an object defined by the previous entry + return res +} + +const getter = ({ data, key }) => { + // keys are a list in which each entry represents the name of + // a property that should be walked through the object in order to + // return the final found value + const keys = parseKeys(key) + let _data = data + let label = '' + + for (const k of keys) { + // empty-bracket-shortcut-syntax is not supported on getter + if (k === _append) { + throw Object.assign( + new Error('Empty brackets are not valid syntax for retrieving values.'), + { code: 'EINVALIDSYNTAX' } + ) + } + + // extra logic to take into account printing array, along with its + // special syntax in which using a dot-sep property name after an + // arry will expand it's results, e.g: + // arr.name -> arr[0].name=value, arr[1].name=value, ... 
+ const maybeIndex = Number(k) + if (Array.isArray(_data) && !Number.isInteger(maybeIndex)) { + _data = _data.reduce((acc, i, index) => { + acc[`${label}[${index}].${k}`] = i[k] + return acc + }, {}) + return _data + } else { + // if can't find any more values, it means it's just over + // and there's nothing to return + if (!_data[k]) + return undefined + + // otherwise sets the next value + _data = _data[k] + } + + label += k + } + + // these are some legacy expectations from + // the old API consumed by lib/view.js + if (Array.isArray(_data) && _data.length <= 1) + _data = _data[0] + + return { + [key]: _data, + } +} + +const setter = ({ data, key, value, force }) => { + // setter goes to recursively transform the provided data obj, + // setting properties from the list of parsed keys, e.g: + // ['foo', 'bar', 'baz'] -> { foo: { bar: { baz: {} } } + const keys = parseKeys(key) + const setKeys = (_data, _key) => { + // handles array indexes, converting valid integers to numbers, + // note that occurences of Symbol(append) will throw, + // so we just ignore these for now + let maybeIndex = Number.NaN + try { + maybeIndex = Number(_key) + } catch (err) {} + if (!Number.isNaN(maybeIndex)) + _key = maybeIndex + + // creates new array in case key is an index + // and the array obj is not yet defined + const keyIsAnArrayIndex = _key === maybeIndex || _key === _append + const dataHasNoItems = !Object.keys(_data).length + if (keyIsAnArrayIndex && dataHasNoItems && !Array.isArray(_data)) + _data = [] + + // converting from array to an object is also possible, in case the + // user is using force mode, we should also convert existing arrays + // to an empty object if the current _data is an array + if (force && Array.isArray(_data) && !keyIsAnArrayIndex) + _data = { ..._data } + + // the _append key is a special key that is used to represent + // the empty-bracket notation, e.g: arr[] -> arr[arr.length] + if (_key === _append) { + if (!Array.isArray(_data)) { + throw Object.assign( + new Error(`Can't use append syntax in non-Array element`), + { code: 'ENOAPPEND' } + ) + } + _key = _data.length + } + + // retrieves the next data object to recursively iterate on, + // throws if trying to override a literal value or add props to an array + const next = () => { + const haveContents = + !force && + _data[_key] != null && + value !== _delete + const shouldNotOverrideLiteralValue = + !(typeof _data[_key] === 'object') + // if the next obj to recurse is an array and the next key to be + // appended to the resulting obj is not an array index, then it + // should throw since we can't append arbitrary props to arrays + const shouldNotAddPropsToArrays = + typeof keys[0] !== 'symbol' && + Array.isArray(_data[_key]) && + Number.isNaN(Number(keys[0])) + + const overrideError = + haveContents && + shouldNotOverrideLiteralValue + if (overrideError) { + throw Object.assign( + new Error(`Property ${_key} already exists and is not an Array or Object.`), + { code: 'EOVERRIDEVALUE' } + ) + } + + const addPropsToArrayError = + haveContents && + shouldNotAddPropsToArrays + if (addPropsToArrayError) { + throw Object.assign( + new Error(`Can't add property ${key} to an Array.`), + { code: 'ENOADDPROP' } + ) + } + + return typeof _data[_key] === 'object' ? 
_data[_key] || {} : {} + } + + // sets items from the parsed array of keys as objects, recurses to + // setKeys in case there are still items to be handled, otherwise it + // just sets the original value set by the user + if (keys.length) + _data[_key] = setKeys(next(), keys.shift()) + else { + // handles special deletion cases for obj props / array items + if (value === _delete) { + if (Array.isArray(_data)) + _data.splice(_key, 1) + else + delete _data[_key] + } else + // finally, sets the value in its right place + _data[_key] = value + } + + return _data + } + + setKeys(data, keys.shift()) +} + +class Queryable { + constructor (obj) { + if (!obj || typeof obj !== 'object') { + throw Object.assign( + new Error('Queryable needs an object to query properties from.'), + { code: 'ENOQUERYABLEOBJ' } + ) + } + + this[_data] = obj + } + + query (queries) { + // this ugly interface here is meant to be a compatibility layer + // with the legacy API lib/view.js is consuming, if at some point + // we refactor that command then we can revisit making this nicer + if (queries === '') + return { '': this[_data] } + + const q = query => getter({ + data: this[_data], + key: query, + }) + + if (Array.isArray(queries)) { + let res = {} + for (const query of queries) + res = { ...res, ...q(query) } + return res + } else + return q(queries) + } + + // return the value for a single query if found, otherwise returns undefined + get (query) { + const obj = this.query(query) + if (obj) + return obj[query] + } + + // creates objects along the way for the provided `query` parameter + // and assigns `value` to the last property of the query chain + set (query, value, { force } = {}) { + setter({ + data: this[_data], + key: query, + value, + force, + }) + } + + // deletes the value of the property found at `query` + delete (query) { + setter({ + data: this[_data], + key: query, + value: _delete, + }) + } + + toJSON () { + return this[_data] + } + + [util.inspect.custom] () { + return this.toJSON() + } +} + +module.exports = Queryable diff --git a/lib/utils/update-notifier.js b/lib/utils/update-notifier.js index ed5806ced2a7d..14c4fac0d58b3 100644 --- a/lib/utils/update-notifier.js +++ b/lib/utils/update-notifier.js @@ -33,12 +33,6 @@ const checkTimeout = async (npm, duration) => { return t > st.mtime } -const updateTimeout = async npm => { - // best effort, if this fails, it's ok. - // might be using /dev/null as the cache or something weird like that. - await writeFile(lastCheckedFile(npm), '').catch(() => {}) -} - const updateNotifier = async (npm, spec = 'latest') => { // never check for updates in CI, when updating npm already, or opted out if (!npm.config.get('update-notifier') || @@ -111,15 +105,16 @@ const updateNotifier = async (npm, spec = 'latest') => { `${oldc} -> ${latestc}\n` + `Changelog: ${changelogc}\n` + `Run ${cmdc} to update!\n` - const messagec = !useColor ? message : chalk.bgBlack.white(message) - return messagec + return message } // only update the notification timeout if we actually finished checking module.exports = async npm => { const notification = await updateNotifier(npm) - // intentional. do not await this. it's a best-effort update. - updateTimeout(npm) + // intentional. do not await this. it's a best-effort update. if this + // fails, it's ok. might be using /dev/null as the cache or something weird + // like that. 
+ writeFile(lastCheckedFile(npm), '').catch(() => {}) npm.updateNotification = notification } diff --git a/lib/view.js b/lib/view.js index 788df3ed0b4d8..47e631f5565c0 100644 --- a/lib/view.js +++ b/lib/view.js @@ -17,6 +17,7 @@ const { packument } = require('pacote') const readFile = promisify(fs.readFile) const readJson = async file => jsonParse(await readFile(file, 'utf8')) +const Queryable = require('./utils/queryable.js') const BaseCommand = require('./base-command.js') class View extends BaseCommand { /* istanbul ignore next - see test/lib/load-all-commands.js */ @@ -459,56 +460,13 @@ function showFields (data, version, fields) { o[k] = s[k] }) }) - return search(o, fields.split('.'), version.version, fields) -} -function search (data, fields, version, title) { - let field - const tail = fields - while (!field && fields.length) - field = tail.shift() - fields = [field].concat(tail) - let o - if (!field && !tail.length) { - o = {} - o[version] = {} - o[version][title] = data - return o - } - let index = field.match(/(.+)\[([^\]]+)\]$/) - if (index) { - field = index[1] - index = index[2] - if (data[field] && data[field][index]) - return search(data[field][index], tail, version, title) - else - field = field + '[' + index + ']' - } - if (Array.isArray(data)) { - if (data.length === 1) - return search(data[0], fields, version, title) - - let results = [] - data.forEach((data, i) => { - const tl = title.length - const newt = title.substr(0, tl - fields.join('.').length - 1) + - '[' + i + ']' + [''].concat(fields).join('.') - results.push(search(data, fields.slice(), version, newt)) - }) - results = results.reduce(reducer, {}) - return results - } - if (!data[field]) - return undefined - data = data[field] - if (tail.length) { - // there are more fields to deal with. - return search(data, tail, version, title) - } - o = {} - o[version] = {} - o[version][title] = data - return o + const queryable = new Queryable(o) + const s = queryable.query(fields) + const res = { [version.version]: s } + + if (s) + return res } function cleanup (data) { diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 37432a3d73d2e..7ee129c97fe71 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -2,7 +2,9 @@ /.package-lock.json package-lock.json CHANGELOG* +changelog* README* +readme* .editorconfig .idea/ .npmignore @@ -48,6 +50,8 @@ README* /@babel/types /@blueoak/list /@eslint/eslintrc +/@humanwhocodes/config-array +/@humanwhocodes/object-schema /@istanbuljs/load-nyc-config /@istanbuljs/schema /@mdx-js/mdx diff --git a/node_modules/@humanwhocodes/config-array/LICENSE b/node_modules/@humanwhocodes/config-array/LICENSE new file mode 100644 index 0000000000000..261eeb9e9f8b2 --- /dev/null +++ b/node_modules/@humanwhocodes/config-array/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/@humanwhocodes/config-array/api.js b/node_modules/@humanwhocodes/config-array/api.js new file mode 100644 index 0000000000000..a9aacf46b5cd4 --- /dev/null +++ b/node_modules/@humanwhocodes/config-array/api.js @@ -0,0 +1,457 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var path = _interopDefault(require('path')); +var minimatch = _interopDefault(require('minimatch')); +var createDebug = _interopDefault(require('debug')); +var objectSchema = require('@humanwhocodes/object-schema'); + +/** + * @fileoverview ConfigSchema + * @author Nicholas C. Zakas + */ + +//------------------------------------------------------------------------------ +// Helpers +//------------------------------------------------------------------------------ + +/** + * Assets that a given value is an array. + * @param {*} value The value to check. + * @returns {void} + * @throws {TypeError} When the value is not an array. + */ +function assertIsArray(value) { + if (!Array.isArray(value)) { + throw new TypeError('Expected value to be an array.'); + } +} + +/** + * Assets that a given value is an array containing only strings and functions. + * @param {*} value The value to check. + * @returns {void} + * @throws {TypeError} When the value is not an array of strings and functions. + */ +function assertIsArrayOfStringsAndFunctions(value, name) { + assertIsArray(value); + + if (value.some(item => typeof item !== 'string' && typeof item !== 'function')) { + throw new TypeError('Expected array to only contain strings.'); + } +} + +//------------------------------------------------------------------------------ +// Exports +//------------------------------------------------------------------------------ + +/** + * The base schema that every ConfigArray uses. + * @type Object + */ +const baseSchema = Object.freeze({ + name: { + required: false, + merge() { + return undefined; + }, + validate(value) { + if (typeof value !== 'string') { + throw new TypeError('Property must be a string.'); + } + } + }, + files: { + required: false, + merge() { + return undefined; + }, + validate(value) { + + // first check if it's an array + assertIsArray(value); + + // then check each member + value.forEach(item => { + if (Array.isArray(item)) { + assertIsArrayOfStringsAndFunctions(item); + } else if (typeof item !== 'string' && typeof item !== 'function') { + throw new TypeError('Items must be a string, a function, or an array of strings and functions.'); + } + }); + + } + }, + ignores: { + required: false, + merge() { + return undefined; + }, + validate: assertIsArrayOfStringsAndFunctions + } +}); + +/** + * @fileoverview ConfigArray + * @author Nicholas C. Zakas + */ + +//------------------------------------------------------------------------------ +// Helpers +//------------------------------------------------------------------------------ + +const debug = createDebug('@hwc/config-array'); + +const MINIMATCH_OPTIONS = { + matchBase: true +}; + +/** + * Shorthand for checking if a value is a string. + * @param {any} value The value to check. + * @returns {boolean} True if a string, false if not. + */ +function isString(value) { + return typeof value === 'string'; +} + +/** + * Normalizes a `ConfigArray` by flattening it and executing any functions + * that are found inside. + * @param {Array} items The items in a `ConfigArray`. 
+ * @param {Object} context The context object to pass into any function + * found. + * @returns {Array} A flattened array containing only config objects. + * @throws {TypeError} When a config function returns a function. + */ +async function normalize(items, context) { + + // TODO: Allow async config functions + + function *flatTraverse(array) { + for (let item of array) { + if (typeof item === 'function') { + item = item(context); + } + + if (Array.isArray(item)) { + yield * flatTraverse(item); + } else if (typeof item === 'function') { + throw new TypeError('A config function can only return an object or array.'); + } else { + yield item; + } + } + } + + return [...flatTraverse(items)]; +} + +/** + * Determines if a given file path is matched by a config. If the config + * has no `files` field, then it matches; otherwise, if a `files` field + * is present then we match the globs in `files` and exclude any globs in + * `ignores`. + * @param {string} filePath The absolute file path to check. + * @param {Object} config The config object to check. + * @returns {boolean} True if the file path is matched by the config, + * false if not. + */ +function pathMatches(filePath, basePath, config) { + + // a config without a `files` field always matches + if (!config.files) { + return true; + } + + // if files isn't an array, throw an error + if (!Array.isArray(config.files) || config.files.length === 0) { + throw new TypeError('The files key must be a non-empty array.'); + } + + const relativeFilePath = path.relative(basePath, filePath); + + // match both strings and functions + const match = pattern => { + if (isString(pattern)) { + return minimatch(relativeFilePath, pattern, MINIMATCH_OPTIONS); + } + + if (typeof pattern === 'function') { + return pattern(filePath); + } + }; + + // check for all matches to config.files + let matches = config.files.some(pattern => { + if (Array.isArray(pattern)) { + return pattern.every(match); + } + + return match(pattern); + }); + + /* + * If the file path matches the config.files patterns, then check to see + * if there are any files to ignore. + */ + if (matches && config.ignores) { + matches = !config.ignores.some(pattern => { + return minimatch(filePath, pattern, MINIMATCH_OPTIONS); + }); + } + + return matches; +} + +/** + * Ensures that a ConfigArray has been normalized. + * @param {ConfigArray} configArray The ConfigArray to check. + * @returns {void} + * @throws {Error} When the `ConfigArray` is not normalized. + */ +function assertNormalized(configArray) { + // TODO: Throw more verbose error + if (!configArray.isNormalized()) { + throw new Error('ConfigArray must be normalized to perform this operation.'); + } +} + +//------------------------------------------------------------------------------ +// Public Interface +//------------------------------------------------------------------------------ + +const ConfigArraySymbol = { + isNormalized: Symbol('isNormalized'), + configCache: Symbol('configCache'), + schema: Symbol('schema'), + finalizeConfig: Symbol('finalizeConfig'), + preprocessConfig: Symbol('preprocessConfig') +}; + +/** + * Represents an array of config objects and provides method for working with + * those config objects. + */ +class ConfigArray extends Array { + + /** + * Creates a new instance of ConfigArray. + * @param {Iterable|Function|Object} configs An iterable yielding config + * objects, or a config function, or a config object. 
+ * @param {string} [options.basePath=""] The path of the config file + * @param {boolean} [options.normalized=false] Flag indicating if the + * configs have already been normalized. + * @param {Object} [options.schema] The additional schema + * definitions to use for the ConfigArray schema. + */ + constructor(configs, { basePath = '', normalized = false, schema: customSchema } = {}) { + super(); + + /** + * Tracks if the array has been normalized. + * @property isNormalized + * @type boolean + * @private + */ + this[ConfigArraySymbol.isNormalized] = normalized; + + /** + * The schema used for validating and merging configs. + * @property schema + * @type ObjectSchema + * @private + */ + this[ConfigArraySymbol.schema] = new objectSchema.ObjectSchema({ + ...customSchema, + ...baseSchema + }); + + /** + * The path of the config file that this array was loaded from. + * This is used to calculate filename matches. + * @property basePath + * @type string + */ + this.basePath = basePath; + + /** + * A cache to store calculated configs for faster repeat lookup. + * @property configCache + * @type Map + * @private + */ + this[ConfigArraySymbol.configCache] = new Map(); + + // load the configs into this array + if (Array.isArray(configs)) { + this.push(...configs); + } else { + this.push(configs); + } + + } + + /** + * Prevent normal array methods from creating a new `ConfigArray` instance. + * This is to ensure that methods such as `slice()` won't try to create a + * new instance of `ConfigArray` behind the scenes as doing so may throw + * an error due to the different constructor signature. + * @returns {Function} The `Array` constructor. + */ + static get [Symbol.species]() { + return Array; + } + + /** + * Returns the `files` globs from every config object in the array. + * Negated patterns (those beginning with `!`) are not returned. + * This can be used to determine which files will be matched by a + * config array or to use as a glob pattern when no patterns are provided + * for a command line interface. + * @returns {string[]} An array of string patterns. + */ + get files() { + + assertNormalized(this); + + const result = []; + + for (const config of this) { + if (config.files) { + config.files.forEach(filePattern => { + if (Array.isArray(filePattern)) { + result.push(...filePattern.filter(pattern => { + return isString(pattern) && !pattern.startsWith('!'); + })); + } else if (isString(filePattern) && !filePattern.startsWith('!')) { + result.push(filePattern); + } + }); + } + } + + return result; + } + + /** + * Returns the file globs that should always be ignored regardless of + * the matching `files` fields in any configs. This is necessary to mimic + * the behavior of things like .gitignore and .eslintignore, allowing a + * globbing operation to be faster. + * @returns {string[]} An array of string patterns to be ignored. + */ + get ignores() { + + assertNormalized(this); + + const result = []; + + for (const config of this) { + if (config.ignores && !config.files) { + result.push(...config.ignores.filter(isString)); + } + } + + return result; + } + + /** + * Indicates if the config array has been normalized. + * @returns {boolean} True if the config array is normalized, false if not. + */ + isNormalized() { + return this[ConfigArraySymbol.isNormalized]; + } + + /** + * Normalizes a config array by flattening embedded arrays and executing + * config functions. + * @param {ConfigContext} context The context object for config functions. 
+ * @returns {ConfigArray} A new ConfigArray instance that is normalized. + */ + async normalize(context = {}) { + + if (!this.isNormalized()) { + const normalizedConfigs = await normalize(this, context); + this.length = 0; + this.push(...normalizedConfigs.map(this[ConfigArraySymbol.preprocessConfig])); + this[ConfigArraySymbol.isNormalized] = true; + + // prevent further changes + Object.freeze(this); + } + + return this; + } + + /** + * Finalizes the state of a config before being cached and returned by + * `getConfig()`. Does nothing by default but is provided to be + * overridden by subclasses as necessary. + * @param {Object} config The config to finalize. + * @returns {Object} The finalized config. + */ + [ConfigArraySymbol.finalizeConfig](config) { + return config; + } + + /** + * Preprocesses a config during the normalization process. This is the + * method to override if you want to convert an array item before it is + * validated for the first time. For example, if you want to replace a + * string with an object, this is the method to override. + * @param {Object} config The config to preprocess. + * @returns {Object} The config to use in place of the argument. + */ + [ConfigArraySymbol.preprocessConfig](config) { + return config; + } + + /** + * Returns the config object for a given file path. + * @param {string} filePath The complete path of a file to get a config for. + * @returns {Object} The config object for this file. + */ + getConfig(filePath) { + + assertNormalized(this); + + // first check the cache to avoid duplicate work + let finalConfig = this[ConfigArraySymbol.configCache].get(filePath); + + if (finalConfig) { + return finalConfig; + } + + // No config found in cache, so calculate a new one + + const matchingConfigs = []; + + for (const config of this) { + if (pathMatches(filePath, this.basePath, config)) { + debug(`Matching config found for ${filePath}`); + matchingConfigs.push(config); + } else { + debug(`No matching config found for ${filePath}`); + } + } + + finalConfig = matchingConfigs.reduce((result, config) => { + return this[ConfigArraySymbol.schema].merge(result, config); + }, {}, this); + + finalConfig = this[ConfigArraySymbol.finalizeConfig](finalConfig); + + this[ConfigArraySymbol.configCache].set(filePath, finalConfig); + + return finalConfig; + } + +} + +exports.ConfigArray = ConfigArray; +exports.ConfigArraySymbol = ConfigArraySymbol; diff --git a/node_modules/@humanwhocodes/config-array/package.json b/node_modules/@humanwhocodes/config-array/package.json new file mode 100644 index 0000000000000..4dc3a4e5d48ab --- /dev/null +++ b/node_modules/@humanwhocodes/config-array/package.json @@ -0,0 +1,61 @@ +{ + "name": "@humanwhocodes/config-array", + "version": "0.5.0", + "description": "Glob-based configuration matching.", + "author": "Nicholas C. 
Zakas", + "main": "api.js", + "files": [ + "api.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/humanwhocodes/config-array.git" + }, + "bugs": { + "url": "https://github.com/humanwhocodes/config-array/issues" + }, + "homepage": "https://github.com/humanwhocodes/config-array#readme", + "scripts": { + "build": "rollup -c", + "format": "nitpik", + "lint": "eslint *.config.js src/*.js tests/*.js", + "prepublish": "npm run build", + "test:coverage": "nyc --include src/*.js npm run test", + "test": "mocha -r esm tests/ --recursive" + }, + "gitHooks": { + "pre-commit": "lint-staged" + }, + "lint-staged": { + "*.js": [ + "nitpik", + "eslint --fix --ignore-pattern '!.eslintrc.js'" + ] + }, + "keywords": [ + "configuration", + "configarray", + "config file" + ], + "license": "Apache-2.0", + "engines": { + "node": ">=10.10.0" + }, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "devDependencies": { + "@nitpik/javascript": "^0.3.3", + "@nitpik/node": "0.0.5", + "chai": "^4.2.0", + "eslint": "^6.7.1", + "esm": "^3.2.25", + "lint-staged": "^10.2.8", + "mocha": "^6.1.4", + "nyc": "^14.1.1", + "rollup": "^1.12.3", + "yorkie": "^2.0.0" + } +} \ No newline at end of file diff --git a/node_modules/@humanwhocodes/object-schema/LICENSE b/node_modules/@humanwhocodes/object-schema/LICENSE new file mode 100644 index 0000000000000..a5e3ae46fdfc2 --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2019, Human Who Codes +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/@humanwhocodes/object-schema/package.json b/node_modules/@humanwhocodes/object-schema/package.json new file mode 100644 index 0000000000000..ba829090e55bd --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/package.json @@ -0,0 +1,33 @@ +{ + "name": "@humanwhocodes/object-schema", + "version": "1.2.0", + "description": "An object schema merger/validator", + "main": "src/index.js", + "directories": { + "test": "tests" + }, + "scripts": { + "test": "mocha tests/" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/humanwhocodes/object-schema.git" + }, + "keywords": [ + "object", + "validation", + "schema", + "merge" + ], + "author": "Nicholas C. Zakas", + "license": "BSD-3-Clause", + "bugs": { + "url": "https://github.com/humanwhocodes/object-schema/issues" + }, + "homepage": "https://github.com/humanwhocodes/object-schema#readme", + "devDependencies": { + "chai": "^4.2.0", + "eslint": "^5.13.0", + "mocha": "^5.2.0" + } +} \ No newline at end of file diff --git a/node_modules/@humanwhocodes/object-schema/src/index.js b/node_modules/@humanwhocodes/object-schema/src/index.js new file mode 100644 index 0000000000000..b2bc4fb96e3cb --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/src/index.js @@ -0,0 +1,7 @@ +/** + * @filedescription Object Schema Package + */ + +exports.ObjectSchema = require("./object-schema").ObjectSchema; +exports.MergeStrategy = require("./merge-strategy").MergeStrategy; +exports.ValidationStrategy = require("./validation-strategy").ValidationStrategy; diff --git a/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js b/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js new file mode 100644 index 0000000000000..82174492764a9 --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js @@ -0,0 +1,53 @@ +/** + * @filedescription Merge Strategy + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Container class for several different merge strategies. + */ +class MergeStrategy { + + /** + * Merges two keys by overwriting the first with the second. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} The second value. + */ + static overwrite(value1, value2) { + return value2; + } + + /** + * Merges two keys by replacing the first with the second only if the + * second is defined. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} The second value if it is defined. + */ + static replace(value1, value2) { + if (typeof value2 !== "undefined") { + return value2; + } + + return value1; + } + + /** + * Merges two properties by assigning properties from the second to the first. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} A new object containing properties from both value1 and + * value2. 
+ */ + static assign(value1, value2) { + return Object.assign({}, value1, value2); + } +} + +exports.MergeStrategy = MergeStrategy; diff --git a/node_modules/@humanwhocodes/object-schema/src/object-schema.js b/node_modules/@humanwhocodes/object-schema/src/object-schema.js new file mode 100644 index 0000000000000..25864f5a280cb --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/src/object-schema.js @@ -0,0 +1,239 @@ +/** + * @filedescription Object Schema + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const { MergeStrategy } = require("./merge-strategy"); +const { ValidationStrategy } = require("./validation-strategy"); + +//----------------------------------------------------------------------------- +// Private +//----------------------------------------------------------------------------- + +const strategies = Symbol("strategies"); +const requiredKeys = Symbol("requiredKeys"); + +/** + * Validates a schema strategy. + * @param {string} name The name of the key this strategy is for. + * @param {Object} strategy The strategy for the object key. + * @param {boolean} [strategy.required=true] Whether the key is required. + * @param {string[]} [strategy.requires] Other keys that are required when + * this key is present. + * @param {Function} strategy.merge A method to call when merging two objects + * with the same key. + * @param {Function} strategy.validate A method to call when validating an + * object with the key. + * @returns {void} + * @throws {Error} When the strategy is missing a name. + * @throws {Error} When the strategy is missing a merge() method. + * @throws {Error} When the strategy is missing a validate() method. + */ +function validateDefinition(name, strategy) { + + let hasSchema = false; + if (strategy.schema) { + if (typeof strategy.schema === "object") { + hasSchema = true; + } else { + throw new TypeError("Schema must be an object."); + } + } + + if (typeof strategy.merge === "string") { + if (!(strategy.merge in MergeStrategy)) { + throw new TypeError(`Definition for key "${name}" missing valid merge strategy.`); + } + } else if (!hasSchema && typeof strategy.merge !== "function") { + throw new TypeError(`Definition for key "${name}" must have a merge property.`); + } + + if (typeof strategy.validate === "string") { + if (!(strategy.validate in ValidationStrategy)) { + throw new TypeError(`Definition for key "${name}" missing valid validation strategy.`); + } + } else if (!hasSchema && typeof strategy.validate !== "function") { + throw new TypeError(`Definition for key "${name}" must have a validate() method.`); + } +} + + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Represents an object validation/merging schema. + */ +class ObjectSchema { + + /** + * Creates a new instance. + */ + constructor(definitions) { + + if (!definitions) { + throw new Error("Schema definitions missing."); + } + + /** + * Track all strategies in the schema by key. + * @type {Map} + * @property strategies + */ + this[strategies] = new Map(); + + /** + * Separately track any keys that are required for faster validation. 
+ * @type {Map} + * @property requiredKeys + */ + this[requiredKeys] = new Map(); + + // add in all strategies + for (const key of Object.keys(definitions)) { + validateDefinition(key, definitions[key]); + + // normalize merge and validate methods if subschema is present + if (typeof definitions[key].schema === "object") { + const schema = new ObjectSchema(definitions[key].schema); + definitions[key] = { + ...definitions[key], + merge(first, second) { + if (first && second) { + return schema.merge(first, second); + } + + return MergeStrategy.assign(first, second); + }, + validate(value) { + ValidationStrategy.object(value); + schema.validate(value); + } + }; + } + + // normalize the merge method in case there's a string + if (typeof definitions[key].merge === "string") { + definitions[key] = { + ...definitions[key], + merge: MergeStrategy[definitions[key].merge] + }; + }; + + // normalize the validate method in case there's a string + if (typeof definitions[key].validate === "string") { + definitions[key] = { + ...definitions[key], + validate: ValidationStrategy[definitions[key].validate] + }; + }; + + this[strategies].set(key, definitions[key]); + + if (definitions[key].required) { + this[requiredKeys].set(key, definitions[key]); + } + } + } + + /** + * Determines if a strategy has been registered for the given object key. + * @param {string} key The object key to find a strategy for. + * @returns {boolean} True if the key has a strategy registered, false if not. + */ + hasKey(key) { + return this[strategies].has(key); + } + + /** + * Merges objects together to create a new object comprised of the keys + * of the all objects. Keys are merged based on the each key's merge + * strategy. + * @param {...Object} objects The objects to merge. + * @returns {Object} A new object with a mix of all objects' keys. + * @throws {Error} If any object is invalid. + */ + merge(...objects) { + + // double check arguments + if (objects.length < 2) { + throw new Error("merge() requires at least two arguments."); + } + + if (objects.some(object => (object == null || typeof object !== "object"))) { + throw new Error("All arguments must be objects."); + } + + return objects.reduce((result, object) => { + + this.validate(object); + + for (const [key, strategy] of this[strategies]) { + try { + if (key in result || key in object) { + const value = strategy.merge.call(this, result[key], object[key]); + if (value !== undefined) { + result[key] = value; + } + } + } catch (ex) { + ex.message = `Key "${key}": ` + ex.message; + throw ex; + } + } + return result; + }, {}); + } + + /** + * Validates an object's keys based on the validate strategy for each key. + * @param {Object} object The object to validate. + * @returns {void} + * @throws {Error} When the object is invalid. 
+ */ + validate(object) { + + // check existing keys first + for (const key of Object.keys(object)) { + + // check to see if the key is defined + if (!this.hasKey(key)) { + throw new Error(`Unexpected key "${key}" found.`); + } + + // validate existing keys + const strategy = this[strategies].get(key); + + // first check to see if any other keys are required + if (Array.isArray(strategy.requires)) { + if (!strategy.requires.every(otherKey => otherKey in object)) { + throw new Error(`Key "${key}" requires keys "${strategy.requires.join("\", \"")}".`); + } + } + + // now apply remaining validation strategy + try { + strategy.validate.call(strategy, object[key]); + } catch (ex) { + ex.message = `Key "${key}": ` + ex.message; + throw ex; + } + } + + // ensure required keys aren't missing + for (const [key] of this[requiredKeys]) { + if (!(key in object)) { + throw new Error(`Missing required key "${key}".`); + } + } + + } +} + +exports.ObjectSchema = ObjectSchema; diff --git a/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js b/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js new file mode 100644 index 0000000000000..ecf918bdd17b7 --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js @@ -0,0 +1,102 @@ +/** + * @filedescription Validation Strategy + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Container class for several different validation strategies. + */ +class ValidationStrategy { + + /** + * Validates that a value is an array. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static array(value) { + if (!Array.isArray(value)) { + throw new TypeError("Expected an array."); + } + } + + /** + * Validates that a value is a boolean. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static boolean(value) { + if (typeof value !== "boolean") { + throw new TypeError("Expected a Boolean."); + } + } + + /** + * Validates that a value is a number. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static number(value) { + if (typeof value !== "number") { + throw new TypeError("Expected a number."); + } + } + + /** + * Validates that a value is a object. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static object(value) { + if (!value || typeof value !== "object") { + throw new TypeError("Expected an object."); + } + } + + /** + * Validates that a value is a object or null. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static "object?"(value) { + if (typeof value !== "object") { + throw new TypeError("Expected an object or null."); + } + } + + /** + * Validates that a value is a string. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static string(value) { + if (typeof value !== "string") { + throw new TypeError("Expected a string."); + } + } + + /** + * Validates that a value is a non-empty string. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. 
+ */ + static "string!"(value) { + if (typeof value !== "string" || value.length === 0) { + throw new TypeError("Expected a non-empty string."); + } + } + +} + +exports.ValidationStrategy = ValidationStrategy; diff --git a/node_modules/@humanwhocodes/object-schema/tests/merge-strategy.js b/node_modules/@humanwhocodes/object-schema/tests/merge-strategy.js new file mode 100644 index 0000000000000..84032d7e89ae2 --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/tests/merge-strategy.js @@ -0,0 +1,66 @@ +/** + * @filedescription Merge Strategy Tests + */ +/* global it, describe, beforeEach */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const assert = require("chai").assert; +const { MergeStrategy } = require("../src/"); + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +describe("MergeStrategy", () => { + + + describe("overwrite()", () => { + + it("should overwrite the first value with the second when the second is defined", () => { + const result = MergeStrategy.overwrite(1, 2); + assert.strictEqual(result, 2); + }); + + it("should overwrite the first value with the second when the second is undefined", () => { + const result = MergeStrategy.overwrite(1, undefined); + assert.strictEqual(result, undefined); + }); + + }); + + describe("replace()", () => { + + it("should overwrite the first value with the second when the second is defined", () => { + const result = MergeStrategy.replace(1, 2); + assert.strictEqual(result, 2); + }); + + it("should return the first value when the second is undefined", () => { + const result = MergeStrategy.replace(1, undefined); + assert.strictEqual(result, 1); + }); + + }); + + describe("assign()", () => { + + it("should merge properties from two objects when called", () => { + + const object1 = { foo: 1, bar: 3 }; + const object2 = { foo: 2 }; + + const result = MergeStrategy.assign(object1, object2); + assert.deepStrictEqual(result, { + foo: 2, + bar: 3 + }); + }); + + }); + +}); diff --git a/node_modules/@humanwhocodes/object-schema/tests/object-schema.js b/node_modules/@humanwhocodes/object-schema/tests/object-schema.js new file mode 100644 index 0000000000000..bff00600c2ad4 --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/tests/object-schema.js @@ -0,0 +1,528 @@ +/** + * @filedescription Object Schema Tests + */ +/* global it, describe, beforeEach */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const assert = require("chai").assert; +const { ObjectSchema } = require("../src/"); + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +describe("ObjectSchema", () => { + + let schema; + + describe("new ObjectSchema()", () => { + + it("should add a new key when a strategy is passed", () => { + schema = new ObjectSchema({ + foo: { + merge() {}, + validate() {} + } + }); + + assert.isTrue(schema.hasKey("foo")); + }); + + it("should throw an error when a strategy is missing a merge() method", () => { + assert.throws(() => { + schema = new ObjectSchema({ + foo: { + validate() { } + } + }); + }, 
/Definition for key "foo" must have a merge property/); + }); + + it("should throw an error when a strategy is missing a merge() method", () => { + assert.throws(() => { + schema = new ObjectSchema(); + }, /Schema definitions missing/); + }); + + it("should throw an error when a strategy is missing a validate() method", () => { + assert.throws(() => { + schema = new ObjectSchema({ + foo: { + merge() { }, + } + }); + }, /Definition for key "foo" must have a validate\(\) method/); + }); + + it("should throw an error when merge is an invalid string", () => { + assert.throws(() => { + new ObjectSchema({ + foo: { + merge: "bar", + validate() { } + } + }); + }, /key "foo" missing valid merge strategy/); + }); + + it("should throw an error when validate is an invalid string", () => { + assert.throws(() => { + new ObjectSchema({ + foo: { + merge: "assign", + validate: "s" + } + }); + }, /key "foo" missing valid validation strategy/); + }); + + }); + + + describe("merge()", () => { + + it("should throw an error when an unexpected key is found", () => { + let schema = new ObjectSchema({}); + + assert.throws(() => { + schema.merge({ foo: true }, { foo: true }); + }, /Unexpected key "foo"/); + }); + + it("should throw an error when merge() throws an error", () => { + let schema = new ObjectSchema({ + foo: { + merge() { + throw new Error("Boom!"); + }, + validate() {} + } + }); + + assert.throws(() => { + schema.merge({ foo: true }, { foo: true }); + }, /Key "foo": Boom!/); + + }); + + it("should call the merge() strategy for one key when called", () => { + + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() {} + } + }); + + const result = schema.merge({ foo: true }, { foo: false }); + assert.propertyVal(result, "foo", "bar"); + }); + + it("should not call the merge() strategy when both objects don't contain the key", () => { + + let called = false; + + schema = new ObjectSchema({ + foo: { + merge() { + called = true; + }, + validate() {} + } + }); + + schema.merge({}, {}); + assert.isFalse(called, "The merge() strategy should not have been called."); + }); + + it("should omit returning the key when the merge() strategy returns undefined", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return undefined; + }, + validate() { } + } + }); + + const result = schema.merge({ foo: true }, { foo: false }); + assert.notProperty(result, "foo"); + }); + + it("should call the merge() strategy for two keys when called", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() { } + }, + bar: { + merge() { + return "baz"; + }, + validate() {} + } + }); + + const result = schema.merge({ foo: true, bar: 1 }, { foo: true, bar: 2 }); + assert.propertyVal(result, "foo", "bar"); + assert.propertyVal(result, "bar", "baz"); + }); + + it("should call the merge() strategy for two keys when called on three objects", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() { } + }, + bar: { + merge() { + return "baz"; + }, + validate() { } + } + }); + + const result = schema.merge( + { foo: true, bar: 1 }, + { foo: true, bar: 3 }, + { foo: false, bar: 2 } + ); + assert.propertyVal(result, "foo", "bar"); + assert.propertyVal(result, "bar", "baz"); + }); + + it("should call the merge() strategy when defined as 'overwrite'", () => { + schema = new ObjectSchema({ + foo: { + merge: "overwrite", + validate() { } + } + }); + + const result = schema.merge( + { foo: true }, + { foo: false } + ); + 
assert.propertyVal(result, "foo", false); + }); + + it("should call the merge() strategy when defined as 'assign'", () => { + schema = new ObjectSchema({ + foo: { + merge: "assign", + validate() { } + } + }); + + const result = schema.merge( + { foo: { bar: true } }, + { foo: { baz: false } } + ); + + assert.strictEqual(result.foo.bar, true); + assert.strictEqual(result.foo.baz, false); + }); + + it("should call the merge strategy when there's a subschema", () => { + + schema = new ObjectSchema({ + name: { + schema: { + first: { + merge: "replace", + validate: "string" + }, + last: { + merge: "replace", + validate: "string" + } + } + } + }); + + const result = schema.merge({ + name: { + first: "n", + last: "z" + } + }, { + name: { + first: "g" + } + }); + + assert.strictEqual(result.name.first, "g"); + assert.strictEqual(result.name.last, "z"); + }); + + it("should not error when calling the merge strategy when there's a subschema and no matching key in second object", () => { + + schema = new ObjectSchema({ + name: { + schema: { + first: { + merge: "replace", + validate: "string" + }, + last: { + merge: "replace", + validate: "string" + } + } + } + }); + + const result = schema.merge({ + name: { + first: "n", + last: "z" + } + }, { + }); + + assert.strictEqual(result.name.first, "n"); + assert.strictEqual(result.name.last, "z"); + }); + + + }); + + describe("validate()", () => { + + it("should throw an error when an unexpected key is found", () => { + let schema = new ObjectSchema({}); + assert.throws(() => { + schema.validate({ foo: true }); + }, /Unexpected key "foo"/); + }); + + it("should not throw an error when an expected key is found", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() {} + } + }); + + schema.validate({ foo: true }); + }); + + it("should pass the property value into validate() when key is found", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate(value) { + assert.isTrue(value); + } + } + }); + + schema.validate({ foo: true }); + }); + + it("should not throw an error when expected keys are found", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() {} + }, + bar: { + merge() { + return "baz"; + }, + validate() {} + } + }); + + schema.validate({ foo: true, bar: true }); + }); + + it("should not throw an error when expected keys are found with required keys", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() { } + }, + bar: { + requires: ["foo"], + merge() { + return "baz"; + }, + validate() { } + } + }); + + schema.validate({ foo: true, bar: true }); + }); + + it("should throw an error when expected keys are found without required keys", () => { + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() { } + }, + baz: { + merge() { + return "baz"; + }, + validate() { } + }, + bar: { + name: "bar", + requires: ["foo", "baz"], + merge() { }, + validate() { } + } + }); + + assert.throws(() => { + schema.validate({ bar: true }); + }, /Key "bar" requires keys "foo", "baz"./); + }); + + + it("should throw an error when an expected key is found but is invalid", () => { + + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate() { + throw new Error("Invalid key."); + } + } + }); + + assert.throws(() => { + schema.validate({ foo: true }); + }, /Key "foo": Invalid key/); + }); + + it("should throw an error when an expected key is found but is invalid with a 
string validator", () => { + + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate: "string" + } + }); + + assert.throws(() => { + schema.validate({ foo: true }); + }, /Key "foo": Expected a string/); + }); + + it("should throw an error when an expected key is found but is invalid with a number validator", () => { + + schema = new ObjectSchema({ + foo: { + merge() { + return "bar"; + }, + validate: "number" + } + }); + + assert.throws(() => { + schema.validate({ foo: true }); + }, /Key "foo": Expected a number/); + }); + + it("should throw an error when a required key is missing", () => { + + schema = new ObjectSchema({ + foo: { + required: true, + merge() { + return "bar"; + }, + validate() {} + } + }); + + assert.throws(() => { + schema.validate({}); + }, /Missing required key "foo"/); + }); + + it("should throw an error when a subschema is provided and the value doesn't validate", () => { + + schema = new ObjectSchema({ + name: { + schema: { + first: { + merge: "replace", + validate: "string" + }, + last: { + merge: "replace", + validate: "string" + } + } + } + }); + + assert.throws(() => { + schema.validate({ + name: { + first: 123, + last: "z" + } + }); + + }, /Key "name": Key "first": Expected a string/); + }); + + it("should not throw an error when a subschema is provided and the value validates", () => { + + schema = new ObjectSchema({ + name: { + schema: { + first: { + merge: "replace", + validate: "string" + }, + last: { + merge: "replace", + validate: "string" + } + } + } + }); + + schema.validate({ + name: { + first: "n", + last: "z" + } + }); + + }); + + }); + +}); diff --git a/node_modules/@humanwhocodes/object-schema/tests/validation-strategy.js b/node_modules/@humanwhocodes/object-schema/tests/validation-strategy.js new file mode 100644 index 0000000000000..5d3e4a24dcf34 --- /dev/null +++ b/node_modules/@humanwhocodes/object-schema/tests/validation-strategy.js @@ -0,0 +1,186 @@ +/** + * @filedescription Merge Strategy Tests + */ +/* global it, describe, beforeEach */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const assert = require("chai").assert; +const { ValidationStrategy } = require("../src/"); + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +describe("ValidationStrategy", () => { + + describe("boolean", () => { + it("should not throw an error when the value is a boolean", () => { + ValidationStrategy.boolean(true); + }); + + it("should throw an error when the value is null", () => { + assert.throws(() => { + ValidationStrategy.boolean(null); + }, /Expected a Boolean/); + }); + + it("should throw an error when the value is a string", () => { + assert.throws(() => { + ValidationStrategy.boolean("foo"); + }, /Expected a Boolean/); + }); + + it("should throw an error when the value is a number", () => { + assert.throws(() => { + ValidationStrategy.boolean(123); + }, /Expected a Boolean/); + }); + + it("should throw an error when the value is an object", () => { + assert.throws(() => { + ValidationStrategy.boolean({}); + }, /Expected a Boolean/); + }); + }); + + describe("number", () => { + it("should not throw an error when the value is a number", () => { + ValidationStrategy.number(25); + }); + + it("should throw an error when the value is null", () => { + 
assert.throws(() => { + ValidationStrategy.number(null); + }, /Expected a number/); + }); + + it("should throw an error when the value is a string", () => { + assert.throws(() => { + ValidationStrategy.number("foo"); + }, /Expected a number/); + }); + + it("should throw an error when the value is a boolean", () => { + assert.throws(() => { + ValidationStrategy.number(true); + }, /Expected a number/); + }); + + it("should throw an error when the value is an object", () => { + assert.throws(() => { + ValidationStrategy.number({}); + }, /Expected a number/); + }); + }); + + describe("object", () => { + it("should not throw an error when the value is an object", () => { + ValidationStrategy.object({}); + }); + + it("should throw an error when the value is null", () => { + assert.throws(() => { + ValidationStrategy.object(null); + }, /Expected an object/); + }); + + it("should throw an error when the value is a string", () => { + assert.throws(() => { + ValidationStrategy.object(""); + }, /Expected an object/); + }); + }); + + describe("array", () => { + it("should not throw an error when the value is an array", () => { + ValidationStrategy.array([]); + }); + + it("should throw an error when the value is null", () => { + assert.throws(() => { + ValidationStrategy.array(null); + }, /Expected an array/); + }); + + it("should throw an error when the value is a string", () => { + assert.throws(() => { + ValidationStrategy.array(""); + }, /Expected an array/); + }); + + it("should throw an error when the value is an object", () => { + assert.throws(() => { + ValidationStrategy.array({}); + }, /Expected an array/); + }); + }); + + describe("object?", () => { + it("should not throw an error when the value is an object", () => { + ValidationStrategy["object?"]({}); + }); + + it("should not throw an error when the value is null", () => { + ValidationStrategy["object?"](null); + }); + + it("should throw an error when the value is a string", () => { + assert.throws(() => { + ValidationStrategy["object?"](""); + }, /Expected an object/); + }); + }); + + describe("string", () => { + it("should not throw an error when the value is a string", () => { + ValidationStrategy.string("foo"); + }); + + it("should not throw an error when the value is an empty string", () => { + ValidationStrategy.string(""); + }); + + it("should throw an error when the value is null", () => { + assert.throws(() => { + ValidationStrategy.string(null); + }, /Expected a string/); + }); + + it("should throw an error when the value is an object", () => { + assert.throws(() => { + ValidationStrategy.string({}); + }, /Expected a string/); + }); + }); + + describe("string!", () => { + it("should not throw an error when the value is an string", () => { + ValidationStrategy["string!"]("foo"); + }); + + it("should throw an error when the value is an empty string", () => { + assert.throws(() => { + ValidationStrategy["string!"](""); + }, /Expected a non-empty string/); + }); + + it("should throw an error when the value is null", () => { + assert.throws(() => { + ValidationStrategy["string!"](null); + }, /Expected a non-empty string/); + }); + + it("should throw an error when the value is an object", () => { + assert.throws(() => { + ValidationStrategy["string!"]({}); + }, /Expected a non-empty string/); + }); + }); + + +}); diff --git a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index 5db11eb3832eb..fdb947dc5905c 100644 --- 
a/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -324,7 +324,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { .then(async root => { if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) { await new this.constructor(this.options).loadActual({ root }) - const tree = root.target || root + const tree = root.target // even though we didn't load it from a package-lock.json FILE, // we still loaded it "from disk", meaning we have to reset // dep flags before assuming that any mutations were reflected. @@ -396,7 +396,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { // update.names request by queueing nodes dependent on those named. async [_applyUserRequests] (options) { process.emit('time', 'idealTree:userRequests') - const tree = this.idealTree.target || this.idealTree + const tree = this.idealTree.target if (!this[_workspaces].length) await this[_applyUserRequestsToNode](tree, options) @@ -532,7 +532,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { /* istanbul ignore else - should also be covered by realpath failure */ if (filepath) { const { name } = spec - const tree = this.idealTree.target || this.idealTree + const tree = this.idealTree.target spec = npa(`file:${relpath(tree.path, filepath)}`, tree.path) spec.name = name } @@ -730,7 +730,7 @@ This is a one-time fix-up, please be patient... // or extraneous. [_buildDeps] () { process.emit('time', 'idealTree:buildDeps') - const tree = this.idealTree.target || this.idealTree + const tree = this.idealTree.target this[_depsQueue].push(tree) this.log.silly('idealTree', 'buildDeps') this.addTracker('idealTree', tree.name, '') @@ -788,7 +788,11 @@ This is a one-time fix-up, please be patient... const Arborist = this.constructor const opt = { ...this.options } await cacache.tmp.withTmp(this.cache, opt, async path => { - await pacote.extract(node.resolved, path, opt) + await pacote.extract(node.resolved, path, { + ...opt, + resolved: node.resolved, + integrity: node.integrity, + }) if (hasShrinkwrap) { await new Arborist({ ...this.options, path }) @@ -914,7 +918,7 @@ This is a one-time fix-up, please be patient... await Promise.all(promises) for (const { to } of node.edgesOut.values()) { - if (to && to.isLink) + if (to && to.isLink && to.target) this[_linkNodes].add(to) } @@ -1293,7 +1297,7 @@ This is a one-time fix-up, please be patient... // when installing globally, or just in global style, we never place // deps above the first level. - const tree = this.idealTree && this.idealTree.target || this.idealTree + const tree = this.idealTree && this.idealTree.target if (this[_globalStyle] && check.resolveParent === tree) break } @@ -1362,7 +1366,7 @@ This is a one-time fix-up, please be patient... integrity: dep.integrity, legacyPeerDeps: this.legacyPeerDeps, error: dep.errors[0], - ...(dep.target ? { target: dep.target, realpath: dep.target.path } : {}), + ...(dep.isLink ? { target: dep.target, realpath: dep.target.path } : {}), }) if (this[_loadFailures].has(dep)) this[_loadFailures].add(newDep) @@ -1421,7 +1425,7 @@ This is a one-time fix-up, please be patient... 
// prune anything deeper in the tree that can be replaced by this if (this.idealTree) { for (const node of this.idealTree.inventory.query('name', newDep.name)) { - if (node.isDescendantOf(target)) + if (!node.isTop && node.isDescendantOf(target)) this[_pruneDedupable](node, false) } } @@ -1819,7 +1823,7 @@ This is a one-time fix-up, please be patient... const current = target !== entryEdge.from && target.resolve(dep.name) if (current) { for (const edge of current.edgesIn.values()) { - if (edge.from.isDescendantOf(target) && edge.valid) { + if (!edge.from.isTop && edge.from.isDescendantOf(target) && edge.valid) { if (!edge.satisfiedBy(dep)) return CONFLICT } @@ -1876,7 +1880,8 @@ This is a one-time fix-up, please be patient... if (link.root !== this.idealTree) continue - const external = /^\.\.(\/|$)/.test(relpath(this.path, link.realpath)) + const tree = this.idealTree.target + const external = !link.target.isDescendantOf(tree) // outside the root, somebody else's problem, ignore it if (external && !this[_follow]) diff --git a/node_modules/@npmcli/arborist/lib/arborist/index.js b/node_modules/@npmcli/arborist/lib/arborist/index.js index 94501cae12c84..b26a26c2be2ab 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/index.js +++ b/node_modules/@npmcli/arborist/lib/arborist/index.js @@ -81,7 +81,7 @@ class Arborist extends Base { const dep = edge.to if (dep) { set.add(dep) - if (dep.target) + if (dep.isLink) set.add(dep.target) } } diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index 9fca7d6425da0..86856d868b426 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -315,7 +315,7 @@ module.exports = cls => class ActualLoader extends cls { [_loadFSTree] (node) { const did = this[_actualTreeLoaded] - node = node.target || node + node = node.target // if a Link target has started, but not completed, then // a Promise will be in the cache to indicate this. 
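Note on the recurring `node.target || node` -> `node.target` simplification in the Arborist hunks above and below: it depends on the Node class change in lib/node.js (further down in this diff), where a plain Node's `target` getter now returns `this` instead of `null`, so only Link nodes resolve to a different node and callers no longer need the `|| node` fallback (they check `isLink` when they specifically need link behavior). A minimal illustrative sketch of that idea, not the actual Arborist classes:

    // Sketch only: why `node.target` is now safe to use unconditionally.
    class Node {
      get isLink () { return false }
      get target () { return this }        // a regular node is its own target
    }

    // Hypothetical stand-in for Arborist's Link; the real class lives in lib/link.js.
    class Link extends Node {
      constructor (realNode) { super(); this._real = realNode }
      get isLink () { return true }
      get target () { return this._real }  // a link resolves to the node it points at
    }

    // before this release: const tree = node.target || node
    // after this release:  const tree = node.target   // correct for both Node and Link
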
diff --git a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js index a98ed23b2a458..d1edcaca01d7e 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js +++ b/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js @@ -221,7 +221,7 @@ module.exports = cls => class VirtualLoader extends cls { [assignBundles] (nodes) { for (const [location, node] of nodes) { // Skip assignment of parentage for the root package - if (!location || node.target && !node.target.location) + if (!location || node.isLink && !node.target.location) continue const { name, parent, package: { inBundle }} = node diff --git a/node_modules/@npmcli/arborist/lib/arborist/rebuild.js b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js index d189ad8c99e3c..8e447bb8f5ad1 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/rebuild.js +++ b/node_modules/@npmcli/arborist/lib/arborist/rebuild.js @@ -169,7 +169,7 @@ module.exports = cls => class Builder extends cls { const queue = [...set].sort(sortNodes) for (const node of queue) { - const { package: { bin, scripts = {} } } = node + const { package: { bin, scripts = {} } } = node.target const { preinstall, install, postinstall, prepare } = scripts const tests = { bin, preinstall, install, postinstall, prepare } for (const [key, has] of Object.entries(tests)) { @@ -202,7 +202,7 @@ module.exports = cls => class Builder extends cls { !(meta.originalLockfileVersion >= 2) } - const { package: pkg, hasInstallScript } = node + const { package: pkg, hasInstallScript } = node.target const { gypfile, bin, scripts = {} } = pkg const { preinstall, install, postinstall, prepare } = scripts @@ -263,7 +263,7 @@ module.exports = cls => class Builder extends cls { devOptional, package: pkg, location, - } = node.target || node + } = node.target // skip any that we know we'll be deleting if (this[_trashList].has(path)) diff --git a/node_modules/@npmcli/arborist/lib/arborist/reify.js b/node_modules/@npmcli/arborist/lib/arborist/reify.js index f259a69b548e1..18b5cd65262a6 100644 --- a/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -289,8 +289,8 @@ module.exports = cls => class Reifier extends cls { const filterNodes = [] if (this[_global] && this.explicitRequests.size) { - const idealTree = this.idealTree.target || this.idealTree - const actualTree = this.actualTree.target || this.actualTree + const idealTree = this.idealTree.target + const actualTree = this.actualTree.target // we ONLY are allowed to make changes in the global top-level // children where there's an explicit request. 
for (const { name } of this.explicitRequests) { @@ -404,10 +404,9 @@ module.exports = cls => class Reifier extends cls { return process.emit('time', 'reify:trashOmits') - // node.parent is checked to make sure this is a node that's in the tree, and - // not the parent-less top level nodes + const filter = node => - node.isDescendantOf(this.idealTree) && + node.top.isProjectRoot && (node.peer && this[_omitPeer] || node.dev && this[_omitDev] || node.optional && this[_omitOptional] || @@ -664,7 +663,7 @@ module.exports = cls => class Reifier extends cls { const node = diff.ideal if (!node) return - if (node.isProjectRoot || (node.target && node.target.isProjectRoot)) + if (node.isProjectRoot) return const { bundleDependencies } = node.package @@ -887,6 +886,18 @@ module.exports = cls => class Reifier extends cls { filter: diff => diff.action === 'ADD' || diff.action === 'CHANGE', }) + // pick up link nodes from the unchanged list as we want to run their + // scripts in every install despite of having a diff status change + for (const node of this.diff.unchanged) { + const tree = node.root.target + + // skip links that only live within node_modules as they are most + // likely managed by packages we installed, we only want to rebuild + // unchanged links we directly manage + if (node.isLink && node.target.fsTop === tree) + nodes.push(node) + } + return this.rebuild({ nodes, handleOptionalFailure: true }) .then(() => process.emit('timeEnd', 'reify:build')) } diff --git a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js index 21d8ddcf7b442..968fc83c5136c 100644 --- a/node_modules/@npmcli/arborist/lib/calc-dep-flags.js +++ b/node_modules/@npmcli/arborist/lib/calc-dep-flags.js @@ -29,7 +29,7 @@ const calcDepFlagsStep = (node) => { resetParents(node, 'optional') // for links, map their hierarchy appropriately - if (node.target) { + if (node.isLink) { node.target.dev = node.dev node.target.optional = node.optional node.target.devOptional = node.devOptional @@ -92,10 +92,10 @@ const unsetFlag = (node, flag) => { tree: node, visit: node => { node.extraneous = node[flag] = false - if (node.target) + if (node.isLink) node.target.extraneous = node.target[flag] = false }, - getChildren: node => [...(node.target || node).edgesOut.values()] + getChildren: node => [...node.target.edgesOut.values()] .filter(edge => edge.to && edge.to[flag] && (flag !== 'peer' && edge.type === 'peer' || edge.type === 'prod')) .map(edge => edge.to), diff --git a/node_modules/@npmcli/arborist/lib/diff.js b/node_modules/@npmcli/arborist/lib/diff.js index 1f8eff0f0c4d9..2008ef7a35bdd 100644 --- a/node_modules/@npmcli/arborist/lib/diff.js +++ b/node_modules/@npmcli/arborist/lib/diff.js @@ -45,8 +45,7 @@ class Diff { const { root } = filterNode if (root !== ideal && root !== actual) throw new Error('invalid filterNode: outside idealTree/actualTree') - const { target } = root - const rootTarget = target || root + const rootTarget = root.target const edge = [...rootTarget.edgesOut.values()].filter(e => { return e.to && (e.to === filterNode || e.to.target === filterNode) })[0] @@ -56,8 +55,7 @@ class Diff { filterSet.add(actual) if (edge && edge.to) { filterSet.add(edge.to) - if (edge.to.target) - filterSet.add(edge.to.target) + filterSet.add(edge.to.target) } filterSet.add(filterNode) @@ -65,7 +63,7 @@ class Diff { tree: filterNode, visit: node => filterSet.add(node), getChildren: node => { - node = node.target || node + node = node.target const loc = node.location const 
idealNode = ideal.inventory.get(loc) const ideals = !idealNode ? [] diff --git a/node_modules/@npmcli/arborist/lib/node.js b/node_modules/@npmcli/arborist/lib/node.js index c21bc46cfb539..2ef0a64f08829 100644 --- a/node_modules/@npmcli/arborist/lib/node.js +++ b/node_modules/@npmcli/arborist/lib/node.js @@ -409,7 +409,7 @@ class Node { } isDescendantOf (node) { - for (let p = this; p; p = p.parent) { + for (let p = this; p; p = p.resolveParent) { if (p === node) return true } @@ -649,7 +649,7 @@ class Node { }) if (this.isLink) { - const target = node.target || node + const target = node.target this[_target] = target this[_package] = target.package target.linksIn.add(this) @@ -1174,7 +1174,7 @@ class Node { } get target () { - return null + return this } set target (n) { @@ -1197,6 +1197,14 @@ class Node { return this.isTop ? this : this.parent.top } + get isFsTop () { + return !this.fsParent + } + + get fsTop () { + return this.isFsTop ? this : this.fsParent.fsTop + } + get resolveParent () { return this.parent || this.fsParent } diff --git a/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/node_modules/@npmcli/arborist/lib/shrinkwrap.js index b251539a94c90..3b2cf0bde1036 100644 --- a/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ b/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -802,7 +802,7 @@ class Shrinkwrap { if (this.tree) { if (this.yarnLock) this.yarnLock.fromTree(this.tree) - const root = Shrinkwrap.metaFromNode(this.tree.target || this.tree, this.path) + const root = Shrinkwrap.metaFromNode(this.tree.target, this.path) this.data.packages = {} if (Object.keys(root).length) this.data.packages[''] = root @@ -864,7 +864,7 @@ class Shrinkwrap { const spec = !edge ? rSpec : npa.resolve(node.name, edge.spec, edge.from.realpath) - if (node.target) + if (node.isLink) lock.version = `file:${relpath(this.path, node.realpath)}` else if (spec && (spec.type === 'file' || spec.type === 'remote')) lock.version = spec.saveSpec @@ -888,7 +888,7 @@ class Shrinkwrap { // when we didn't resolve to git, file, or dir, and didn't request // git, file, dir, or remote, then the resolved value is necessary. 
if (node.resolved && - !node.target && + !node.isLink && rSpec.type !== 'git' && rSpec.type !== 'file' && rSpec.type !== 'directory' && @@ -917,7 +917,7 @@ class Shrinkwrap { lock.optional = true } - const depender = node.target || node + const depender = node.target if (depender.edgesOut.size > 0) { if (node !== this.tree) { lock.requires = [...depender.edgesOut.entries()].reduce((set, [k, v]) => { @@ -942,7 +942,7 @@ class Shrinkwrap { } // now we walk the children, putting them in the 'dependencies' object - const {children} = node.target || node + const {children} = node.target if (!children.size) delete lock.dependencies else { diff --git a/node_modules/@npmcli/arborist/package.json b/node_modules/@npmcli/arborist/package.json index 138d6ec25b4c2..c45a61086ea5e 100644 --- a/node_modules/@npmcli/arborist/package.json +++ b/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "2.6.4", + "version": "2.7.1", "description": "Manage node_modules trees", "dependencies": { "@npmcli/installed-package-contents": "^1.0.7", @@ -16,6 +16,7 @@ "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", "json-stringify-nice": "^1.1.4", + "mkdirp": "^1.0.4", "mkdirp-infer-owner": "^2.0.0", "npm-install-checks": "^4.0.0", "npm-package-arg": "^8.1.0", @@ -28,7 +29,9 @@ "promise-call-limit": "^1.0.1", "read-package-json-fast": "^2.0.2", "readdir-scoped-modules": "^1.1.0", + "rimraf": "^3.0.2", "semver": "^7.3.5", + "ssri": "^8.0.1", "tar": "^6.1.0", "treeverse": "^1.0.4", "walk-up-path": "^1.0.0" diff --git a/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/git/lib/errors.js new file mode 100644 index 0000000000000..25b2b9f9fd6a6 --- /dev/null +++ b/node_modules/@npmcli/git/lib/errors.js @@ -0,0 +1,36 @@ + +const maxRetry = 3 + +class GitError extends Error { + shouldRetry () { + return false + } +} + +class GitConnectionError extends GitError { + constructor (message) { + super('A git connection error occurred') + } + + shouldRetry (number) { + return number < maxRetry + } +} + +class GitPathspecError extends GitError { + constructor (message) { + super('The git reference could not be found') + } +} + +class GitUnknownError extends GitError { + constructor (message) { + super('An unknown git error occurred') + } +} + +module.exports = { + GitConnectionError, + GitPathspecError, + GitUnknownError +} diff --git a/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/git/lib/index.js index 50fd889b89b5a..20d7cfd01cfd1 100644 --- a/node_modules/@npmcli/git/lib/index.js +++ b/node_modules/@npmcli/git/lib/index.js @@ -4,5 +4,6 @@ module.exports = { spawn: require('./spawn.js'), is: require('./is.js'), find: require('./find.js'), - isClean: require('./is-clean.js') + isClean: require('./is-clean.js'), + errors: require('./errors.js') } diff --git a/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/git/lib/make-error.js new file mode 100644 index 0000000000000..043a8e6e95181 --- /dev/null +++ b/node_modules/@npmcli/git/lib/make-error.js @@ -0,0 +1,33 @@ +const { + GitConnectionError, + GitPathspecError, + GitUnknownError +} = require('./errors.js') + +const connectionErrorRe = new RegExp([ + 'remote error: Internal Server Error', + 'The remote end hung up unexpectedly', + 'Connection timed out', + 'Operation timed out', + 'Failed to connect to .* Timed out', + 'Connection reset by peer', + 'SSL_ERROR_SYSCALL', + 'The requested URL returned error: 503' +].join('|')) + +const missingPathspecRe = /pathspec .* did 
not match any file\(s\) known to git/ + +function makeError (er) { + const message = er.stderr + let gitEr + if (connectionErrorRe.test(message)) { + gitEr = new GitConnectionError(message) + } else if (missingPathspecRe.test(message)) { + gitEr = new GitPathspecError(message) + } else { + gitEr = new GitUnknownError(message) + } + return Object.assign(gitEr, er) +} + +module.exports = makeError diff --git a/node_modules/@npmcli/git/lib/should-retry.js b/node_modules/@npmcli/git/lib/should-retry.js deleted file mode 100644 index 8082bb5d7c6e7..0000000000000 --- a/node_modules/@npmcli/git/lib/should-retry.js +++ /dev/null @@ -1,17 +0,0 @@ -const transientErrors = [ - 'remote error: Internal Server Error', - 'The remote end hung up unexpectedly', - 'Connection timed out', - 'Operation timed out', - 'Failed to connect to .* Timed out', - 'Connection reset by peer', - 'SSL_ERROR_SYSCALL', - 'The requested URL returned error: 503' -].join('|') - -const transientErrorRe = new RegExp(transientErrors) - -const maxRetry = 3 - -module.exports = (error, number) => - transientErrorRe.test(error) && (number < maxRetry) diff --git a/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/git/lib/spawn.js index 337164a9a012d..1c89a4c53cf86 100644 --- a/node_modules/@npmcli/git/lib/spawn.js +++ b/node_modules/@npmcli/git/lib/spawn.js @@ -1,6 +1,6 @@ const spawn = require('@npmcli/promise-spawn') const promiseRetry = require('promise-retry') -const shouldRetry = require('./should-retry.js') +const makeError = require('./make-error.js') const whichGit = require('./which.js') const makeOpts = require('./opts.js') const procLog = require('./proc-log.js') @@ -33,10 +33,11 @@ module.exports = (gitArgs, opts = {}) => { return spawn(gitPath, args, makeOpts(opts)) .catch(er => { - if (!shouldRetry(er.stderr, number)) { - throw er + const gitError = makeError(er) + if (!gitError.shouldRetry(number)) { + throw gitError } - retry(er) + retry(gitError) }) }, retry) } diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json index 0fe94686ece20..9475da5007a7d 100644 --- a/node_modules/@npmcli/git/package.json +++ b/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "2.0.9", + "version": "2.1.0", "main": "lib/index.js", "files": [ "lib/*.js" diff --git a/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/make-fetch-happen/lib/cache/entry.js index 41f8a3d215ee1..a2acea156ee6f 100644 --- a/node_modules/make-fetch-happen/lib/cache/entry.js +++ b/node_modules/make-fetch-happen/lib/cache/entry.js @@ -48,6 +48,7 @@ const KEEP_RESPONSE_HEADERS = [ // return an object containing all metadata to be written to the index const getMetadata = (request, response, options) => { const metadata = { + time: Date.now(), url: request.url, reqHeaders: {}, resHeaders: {}, @@ -112,9 +113,18 @@ const _policy = Symbol('policy') class CacheEntry { constructor ({ entry, request, response, options }) { - this.entry = entry + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else + this.key = cacheKey(request) + this.options = options - this.key = entry ? 
entry.key : cacheKey(request) // these properties are behind getters that lazily evaluate this[_request] = request @@ -340,13 +350,25 @@ class CacheEntry { const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) body.end(content) } catch (err) { + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) body.emit('error', err) } } } else { onResume = () => { const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) - cacheStream.on('error', (err) => body.emit('error', err)) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) + body.emit('error', err) + cacheStream.resume() + }) cacheStream.pipe(body) } } @@ -368,7 +390,7 @@ class CacheEntry { response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream') response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.time).toUTCString()) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) return response } diff --git a/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/make-fetch-happen/lib/cache/policy.js index 189dce80ee68e..e0959f64ddf9d 100644 --- a/node_modules/make-fetch-happen/lib/cache/policy.js +++ b/node_modules/make-fetch-happen/lib/cache/policy.js @@ -67,7 +67,7 @@ class CachePolicy { // this is necessary because the CacheSemantics constructor forces // the value to Date.now() which means a policy created from a // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.time + this.policy._responseTime = this.entry.metadata.time } } diff --git a/node_modules/make-fetch-happen/lib/remote.js b/node_modules/make-fetch-happen/lib/remote.js index e37f39de845f3..7e4ed24edb530 100644 --- a/node_modules/make-fetch-happen/lib/remote.js +++ b/node_modules/make-fetch-happen/lib/remote.js @@ -14,6 +14,7 @@ const RETRY_ERRORS = [ 'ECONNREFUSED', // remote host refused to open connection 'EADDRINUSE', // failed to bind to a local port (proxy?) 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive // Known codes we do NOT retry on: // ENOTFOUND (getaddrinfo failure. 
Either bad hostname, or offline) ] diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index 44330998bb02f..e4a26a9cd94df 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "9.0.3", + "version": "9.0.4", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ diff --git a/node_modules/minipass-fetch/lib/index.js b/node_modules/minipass-fetch/lib/index.js index d6ed57942e80f..2ffcba8510554 100644 --- a/node_modules/minipass-fetch/lib/index.js +++ b/node_modules/minipass-fetch/lib/index.js @@ -94,6 +94,19 @@ const fetch = (url, opts) => { } req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) + req.res.emit('error', er) reject(new FetchError(`request to ${request.url} failed, reason: ${ er.message}`, 'system', er)) finalize() @@ -286,8 +299,16 @@ const fetch = (url, opts) => { // for br - if (codings == 'br' && typeof zlib.BrotliDecompress === 'function') { - const decoder = new zlib.BrotliDecompress() + if (codings == 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } // exceedingly rare that the stream would have an error, // but just in case we proxy it to the stream in use. 
body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) diff --git a/node_modules/minipass-fetch/lib/request.js b/node_modules/minipass-fetch/lib/request.js index c5208a7fc1300..173f415d18e7b 100644 --- a/node_modules/minipass-fetch/lib/request.js +++ b/node_modules/minipass-fetch/lib/request.js @@ -77,6 +77,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, @@ -101,6 +102,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, @@ -208,6 +210,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, @@ -234,6 +237,7 @@ class Request extends Body { crl, dhparam, ecdhCurve, + family, honorCipherOrder, key, passphrase, diff --git a/node_modules/minipass-fetch/package.json b/node_modules/minipass-fetch/package.json index df48f372a6079..64dab7816bd12 100644 --- a/node_modules/minipass-fetch/package.json +++ b/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "1.3.3", + "version": "1.3.4", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", diff --git a/node_modules/pacote/lib/fetcher.js b/node_modules/pacote/lib/fetcher.js index d488e88ff7236..69dd025b7bd98 100644 --- a/node_modules/pacote/lib/fetcher.js +++ b/node_modules/pacote/lib/fetcher.js @@ -119,6 +119,13 @@ class FetcherBase { '--no-progress', '--no-save', '--no-audit', + // override any omit settings from the environment + '--include=dev', + '--include=peer', + '--include=optional', + // we need the actual things, not just the lockfile + '--no-package-lock-only', + '--no-dry-run', ] } @@ -430,6 +437,7 @@ class FetcherBase { return { cwd, noChmod: true, + noMtime: true, filter: (name, entry) => { if (/Link$/.test(entry.type)) return false diff --git a/node_modules/pacote/lib/git.js b/node_modules/pacote/lib/git.js index 973e13ea9be43..18f42547bb3ac 100644 --- a/node_modules/pacote/lib/git.js +++ b/node_modules/pacote/lib/git.js @@ -85,6 +85,9 @@ class GitFetcher extends Fetcher { [_resolvedFromHosted] (hosted) { return this[_resolvedFromRepo](hosted.https && hosted.https()) .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) + throw er const ssh = hosted.sshurl && hosted.sshurl() // no fallthrough if we can't fall through or have https auth if (!ssh || hosted.auth) @@ -260,9 +263,11 @@ class GitFetcher extends Fetcher { // is present, otherwise ssh if the hosted type provides it [_cloneHosted] (ref, tmp) { const hosted = this.spec.hosted - const https = hosted.https() return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp) .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) + throw er const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) // no fallthrough if we can't fall through or have https auth if (!ssh || hosted.auth) diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index 7472c6eeab0cc..437bb8f79e1d8 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "11.3.4", + "version": "11.3.5", "description": "JavaScript package downloader", "author": "Isaac Z. 
Schlueter (https://izs.me)", "bin": { @@ -33,7 +33,7 @@ "git" ], "dependencies": { - "@npmcli/git": "^2.0.1", + "@npmcli/git": "^2.1.0", "@npmcli/installed-package-contents": "^1.0.6", "@npmcli/promise-spawn": "^1.2.0", "@npmcli/run-script": "^1.8.2", diff --git a/node_modules/socks/typings/common/receivebuffer.d.ts b/node_modules/socks/typings/common/receivebuffer.d.ts new file mode 100644 index 0000000000000..756e98b5893ed --- /dev/null +++ b/node_modules/socks/typings/common/receivebuffer.d.ts @@ -0,0 +1,12 @@ +/// +declare class ReceiveBuffer { + private buffer; + private offset; + private originalSize; + constructor(size?: number); + get length(): number; + append(data: Buffer): number; + peek(length: number): Buffer; + get(length: number): Buffer; +} +export { ReceiveBuffer }; diff --git a/package-lock.json b/package-lock.json index 570cded5aa58b..873464a3cf4b2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "npm", - "version": "7.19.1", + "version": "7.20.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "npm", - "version": "7.19.1", + "version": "7.20.0", "bundleDependencies": [ "@npmcli/arborist", "@npmcli/ci-detect", @@ -83,7 +83,7 @@ "packages/*" ], "dependencies": { - "@npmcli/arborist": "^2.6.4", + "@npmcli/arborist": "^2.7.1", "@npmcli/ci-detect": "^1.2.0", "@npmcli/config": "^2.2.0", "@npmcli/package-json": "^1.0.1", @@ -118,7 +118,7 @@ "libnpmsearch": "^3.1.1", "libnpmteam": "^2.0.3", "libnpmversion": "^1.2.1", - "make-fetch-happen": "^9.0.3", + "make-fetch-happen": "^9.0.4", "minipass": "^3.1.3", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -134,7 +134,7 @@ "npm-user-validate": "^1.0.1", "npmlog": "~4.1.2", "opener": "^1.5.2", - "pacote": "^11.3.3", + "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", @@ -157,7 +157,7 @@ "npx": "bin/npx-cli.js" }, "devDependencies": { - "eslint": "^7.26.0", + "eslint": "^7.30.0", "eslint-plugin-import": "^2.23.4", "eslint-plugin-node": "^11.1.0", "eslint-plugin-promise": "^5.1.0", @@ -589,6 +589,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz", + "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", + "dev": true + }, "node_modules/@istanbuljs/load-nyc-config": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", @@ -735,9 +755,9 @@ } }, "node_modules/@npmcli/arborist": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.6.4.tgz", - "integrity": "sha512-A/pDQ/VZpdxaqsQS5XOWrhrPuC+ER7HLq+4ZkEmnO2yo/USFCWEsiUPYKhfY+sWXK3pgKjN7B7CEFmAnSoAt3g==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.7.1.tgz", + "integrity": 
"sha512-EGDHJs6dna/52BrStr/6aaRcMLrYxGbSjT4V3JzvoTBY9/w5i2+1KNepmsG80CAsGADdo6nuNnFwb7sDRm8ZAw==", "inBundle": true, "dependencies": { "@npmcli/installed-package-contents": "^1.0.7", @@ -753,6 +773,7 @@ "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", "json-stringify-nice": "^1.1.4", + "mkdirp": "^1.0.4", "mkdirp-infer-owner": "^2.0.0", "npm-install-checks": "^4.0.0", "npm-package-arg": "^8.1.0", @@ -765,7 +786,9 @@ "promise-call-limit": "^1.0.1", "read-package-json-fast": "^2.0.2", "readdir-scoped-modules": "^1.1.0", + "rimraf": "^3.0.2", "semver": "^7.3.5", + "ssri": "^8.0.1", "tar": "^6.1.0", "treeverse": "^1.0.4", "walk-up-path": "^1.0.0" @@ -811,9 +834,9 @@ } }, "node_modules/@npmcli/git": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.0.9.tgz", - "integrity": "sha512-hTMbMryvOqGLwnmMBKs5usbPsJtyEsMsgXwJbmNrsEuQQh1LAIMDU77IoOrwkCg+NgQWl+ySlarJASwM3SutCA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.1.0.tgz", + "integrity": "sha512-/hBFX/QG1b+N7PZBFs0bi+evgRZcK9nWBxQKZkGoXUT5hJSwl5c4d7y8/hm+NQZRPhQ67RzFaj5UM9YeyKoryw==", "inBundle": true, "dependencies": { "@npmcli/promise-spawn": "^1.3.2", @@ -2527,13 +2550,14 @@ } }, "node_modules/eslint": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.29.0.tgz", - "integrity": "sha512-82G/JToB9qIy/ArBzIWG9xvvwL3R86AlCjtGw+A29OMZDqhTybz/MByORSukGxeI+YPCR4coYyITKk8BFH9nDA==", + "version": "7.30.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.30.0.tgz", + "integrity": "sha512-VLqz80i3as3NdloY44BQSJpFw534L9Oh+6zJOUaViV4JPd+DaHwutqP7tcpkW3YiXbK6s05RZl7yl7cQn+lijg==", "dev": true, "dependencies": { "@babel/code-frame": "7.12.11", "@eslint/eslintrc": "^0.4.2", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -4965,9 +4989,9 @@ } }, "node_modules/make-fetch-happen": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.0.3.tgz", - "integrity": "sha512-uZ/9Cf2vKqsSWZyXhZ9wHHyckBrkntgbnqV68Bfe8zZenlf7D6yuGMXvHZQ+jSnzPkjosuNP1HGasj1J4h8OlQ==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.0.4.tgz", + "integrity": "sha512-sQWNKMYqSmbAGXqJg2jZ+PmHh5JAybvwu0xM8mZR/bsTjGiTASj3ldXJV7KFHy1k/IJIBkjxQFoWIVsv9+PQMg==", "inBundle": true, "dependencies": { "agentkeepalive": "^4.1.3", @@ -5154,9 +5178,9 @@ } }, "node_modules/minipass-fetch": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.3.tgz", - "integrity": "sha512-akCrLDWfbdAWkMLBxJEeWTdNsjML+dt5YgOI4gJ53vuO0vrmYQkUPxa6j6V65s9CcePIr2SSWqjT2EcrNseryQ==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.4.tgz", + "integrity": "sha512-TielGogIzbUEtd1LsjZFs47RWuHHfhl6TiCx1InVxApBAmQ8bL0dL5ilkLGcRvuyW/A9nE+Lvn855Ewz8S0PnQ==", "inBundle": true, "dependencies": { "minipass": "^3.1.0", @@ -5893,12 +5917,12 @@ } }, "node_modules/pacote": { - "version": "11.3.4", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.4.tgz", - "integrity": "sha512-RfahPCunM9GI7ryJV/zY0bWQiokZyLqaSNHXtbNSoLb7bwTvBbJBEyCJ01KWs4j1Gj7GmX8crYXQ1sNX6P2VKA==", + "version": "11.3.5", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz", + "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==", "inBundle": true, "dependencies": { - 
"@npmcli/git": "^2.0.1", + "@npmcli/git": "^2.1.0", "@npmcli/installed-package-contents": "^1.0.6", "@npmcli/promise-spawn": "^1.2.0", "@npmcli/run-script": "^1.8.2", @@ -10759,6 +10783,23 @@ } } }, + "@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + } + }, + "@humanwhocodes/object-schema": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz", + "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", + "dev": true + }, "@istanbuljs/load-nyc-config": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", @@ -10869,9 +10910,9 @@ "dev": true }, "@npmcli/arborist": { - "version": "2.6.4", - "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.6.4.tgz", - "integrity": "sha512-A/pDQ/VZpdxaqsQS5XOWrhrPuC+ER7HLq+4ZkEmnO2yo/USFCWEsiUPYKhfY+sWXK3pgKjN7B7CEFmAnSoAt3g==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-2.7.1.tgz", + "integrity": "sha512-EGDHJs6dna/52BrStr/6aaRcMLrYxGbSjT4V3JzvoTBY9/w5i2+1KNepmsG80CAsGADdo6nuNnFwb7sDRm8ZAw==", "requires": { "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/map-workspaces": "^1.0.2", @@ -10886,6 +10927,7 @@ "common-ancestor-path": "^1.0.1", "json-parse-even-better-errors": "^2.3.1", "json-stringify-nice": "^1.1.4", + "mkdirp": "^1.0.4", "mkdirp-infer-owner": "^2.0.0", "npm-install-checks": "^4.0.0", "npm-package-arg": "^8.1.0", @@ -10898,7 +10940,9 @@ "promise-call-limit": "^1.0.1", "read-package-json-fast": "^2.0.2", "readdir-scoped-modules": "^1.1.0", + "rimraf": "^3.0.2", "semver": "^7.3.5", + "ssri": "^8.0.1", "tar": "^6.1.0", "treeverse": "^1.0.4", "walk-up-path": "^1.0.0" @@ -10930,9 +10974,9 @@ } }, "@npmcli/git": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.0.9.tgz", - "integrity": "sha512-hTMbMryvOqGLwnmMBKs5usbPsJtyEsMsgXwJbmNrsEuQQh1LAIMDU77IoOrwkCg+NgQWl+ySlarJASwM3SutCA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-2.1.0.tgz", + "integrity": "sha512-/hBFX/QG1b+N7PZBFs0bi+evgRZcK9nWBxQKZkGoXUT5hJSwl5c4d7y8/hm+NQZRPhQ67RzFaj5UM9YeyKoryw==", "requires": { "@npmcli/promise-spawn": "^1.3.2", "lru-cache": "^6.0.0", @@ -12212,13 +12256,14 @@ } }, "eslint": { - "version": "7.29.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.29.0.tgz", - "integrity": "sha512-82G/JToB9qIy/ArBzIWG9xvvwL3R86AlCjtGw+A29OMZDqhTybz/MByORSukGxeI+YPCR4coYyITKk8BFH9nDA==", + "version": "7.30.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.30.0.tgz", + "integrity": "sha512-VLqz80i3as3NdloY44BQSJpFw534L9Oh+6zJOUaViV4JPd+DaHwutqP7tcpkW3YiXbK6s05RZl7yl7cQn+lijg==", "dev": true, "requires": { "@babel/code-frame": "7.12.11", "@eslint/eslintrc": "^0.4.2", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -14010,9 +14055,9 @@ } }, "make-fetch-happen": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.0.3.tgz", - "integrity": 
"sha512-uZ/9Cf2vKqsSWZyXhZ9wHHyckBrkntgbnqV68Bfe8zZenlf7D6yuGMXvHZQ+jSnzPkjosuNP1HGasj1J4h8OlQ==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.0.4.tgz", + "integrity": "sha512-sQWNKMYqSmbAGXqJg2jZ+PmHh5JAybvwu0xM8mZR/bsTjGiTASj3ldXJV7KFHy1k/IJIBkjxQFoWIVsv9+PQMg==", "requires": { "agentkeepalive": "^4.1.3", "cacache": "^15.2.0", @@ -14142,9 +14187,9 @@ } }, "minipass-fetch": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.3.tgz", - "integrity": "sha512-akCrLDWfbdAWkMLBxJEeWTdNsjML+dt5YgOI4gJ53vuO0vrmYQkUPxa6j6V65s9CcePIr2SSWqjT2EcrNseryQ==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.3.4.tgz", + "integrity": "sha512-TielGogIzbUEtd1LsjZFs47RWuHHfhl6TiCx1InVxApBAmQ8bL0dL5ilkLGcRvuyW/A9nE+Lvn855Ewz8S0PnQ==", "requires": { "encoding": "^0.1.12", "minipass": "^3.1.0", @@ -14687,11 +14732,11 @@ } }, "pacote": { - "version": "11.3.4", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.4.tgz", - "integrity": "sha512-RfahPCunM9GI7ryJV/zY0bWQiokZyLqaSNHXtbNSoLb7bwTvBbJBEyCJ01KWs4j1Gj7GmX8crYXQ1sNX6P2VKA==", + "version": "11.3.5", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-11.3.5.tgz", + "integrity": "sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==", "requires": { - "@npmcli/git": "^2.0.1", + "@npmcli/git": "^2.1.0", "@npmcli/installed-package-contents": "^1.0.6", "@npmcli/promise-spawn": "^1.2.0", "@npmcli/run-script": "^1.8.2", diff --git a/package.json b/package.json index 73b03991026e1..f0b9642905ced 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "version": "7.19.1", + "version": "7.20.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -53,7 +53,7 @@ "./package.json": "./package.json" }, "dependencies": { - "@npmcli/arborist": "^2.6.4", + "@npmcli/arborist": "^2.7.1", "@npmcli/ci-detect": "^1.2.0", "@npmcli/config": "^2.2.0", "@npmcli/package-json": "^1.0.1", @@ -88,7 +88,7 @@ "libnpmsearch": "^3.1.1", "libnpmteam": "^2.0.3", "libnpmversion": "^1.2.1", - "make-fetch-happen": "^9.0.3", + "make-fetch-happen": "^9.0.4", "minipass": "^3.1.3", "minipass-pipeline": "^1.2.4", "mkdirp": "^1.0.4", @@ -104,7 +104,7 @@ "npm-user-validate": "^1.0.1", "npmlog": "~4.1.2", "opener": "^1.5.2", - "pacote": "^11.3.3", + "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", @@ -193,7 +193,7 @@ "write-file-atomic" ], "devDependencies": { - "eslint": "^7.26.0", + "eslint": "^7.30.0", "eslint-plugin-import": "^2.23.4", "eslint-plugin-node": "^11.1.0", "eslint-plugin-promise": "^5.1.0", diff --git a/scripts/bundle-and-gitignore-deps.js b/scripts/bundle-and-gitignore-deps.js index 407b9e5982514..96c1419e21807 100644 --- a/scripts/bundle-and-gitignore-deps.js +++ b/scripts/bundle-and-gitignore-deps.js @@ -30,7 +30,9 @@ arb.loadVirtual().then(tree => { /.package-lock.json package-lock.json CHANGELOG* +changelog* README* +readme* .editorconfig .idea/ .npmignore diff --git a/smoke-tests/index.js b/smoke-tests/index.js index c7b2d2a1cf7bc..9235c8960a26a 100644 --- a/smoke-tests/index.js +++ b/smoke-tests/index.js @@ -209,3 +209,35 @@ t.test('npm uninstall', async t => { 'should have expected uninstall lockfile result' ) }) + +t.test('npm pkg', async t => { + let cmd = `${npmBin} pkg get license` + let cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should 
have expected pkg get output') + + cmd = `${npmBin} pkg set tap[test-env][0]=LC_ALL=sk` + cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected pkg set output') + + t.matchSnapshot( + readFile('package.json'), + 'should have expected npm pkg set modified package.json result' + ) + + cmd = `${npmBin} pkg get` + cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should print package.json contents') + + cmd = `${npmBin} pkg delete tap` + cmdRes = await exec(cmd) + t.matchSnapshot(cmdRes.replace(/in.*s/, ''), + 'should have expected pkg delete output') + + t.matchSnapshot( + readFile('package.json'), + 'should have expected npm pkg delete modified package.json result' + ) +}) diff --git a/tap-snapshots/smoke-tests/index.js.test.cjs b/tap-snapshots/smoke-tests/index.js.test.cjs index 89c0cb20b5e36..0a79e38cdfa03 100644 --- a/tap-snapshots/smoke-tests/index.js.test.cjs +++ b/tap-snapshots/smoke-tests/index.js.test.cjs @@ -26,10 +26,10 @@ All commands: edit, exec, explain, explore, find-dupes, fund, get, help, hook, init, install, install-ci-test, install-test, link, ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami Specify configs in the ini-formatted file: {CWD}/smoke-tests/tap-testdir-index/.npmrc @@ -482,6 +482,89 @@ abbrev 1.0.4 1.1.1 1.1.1 node_modules/abbrev project ` +exports[`smoke-tests/index.js TAP npm pkg > should have expected npm pkg delete modified package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + } +} + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected npm pkg set modified package.json result 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "tap": { + "test-env": [ + "LC_ALL=sk" + ] + } +} + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected pkg delete output 1`] = ` + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected pkg get output 1`] = ` +"ISC" + +` + +exports[`smoke-tests/index.js TAP npm pkg > should have expected pkg set output 1`] = ` + +` + +exports[`smoke-tests/index.js TAP npm pkg > should print package.json contents 1`] = ` +{ + "name": "project", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo /"Error: no test specified/" && exit 1", + "hello": "echo Hello" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.4" + }, + "tap": { + "test-env": [ + "LC_ALL=sk" + ] + } +} + +` + exports[`smoke-tests/index.js TAP npm prefix > should have expected prefix output 1`] = ` {CWD}/smoke-tests/tap-testdir-index/project
diff --git a/tap-snapshots/test/lib/config.js.test.cjs b/tap-snapshots/test/lib/config.js.test.cjs index 84418ec2e816d..b5acbb0af94c5 100644 --- a/tap-snapshots/test/lib/config.js.test.cjs +++ b/tap-snapshots/test/lib/config.js.test.cjs @@ -5,7 +5,7 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' -exports[`test/lib/config.js TAP config edit --global > should write global config file 1`] = ` +exports[`test/lib/config.js TAP config edit --location=global > should write global config file 1`] = ` ;;;; ; npm globalconfig file: /etc/npmrc ; this is a simple ini-formatted file @@ -92,8 +92,8 @@ cat = true chai = true dog = true editor = "vi" -global = false json = false +location = "user" long = false ; node bin location = /path/to/node @@ -116,8 +116,8 @@ cat = true chai = true dog = true editor = "vi" -global = false json = false +location = "user" long = true ` @@ -128,8 +128,8 @@ cat = true chai = true dog = true editor = "vi" -global = false json = false +location = "user" long = false ; node bin location = /path/to/node @@ -145,9 +145,9 @@ cat = true chai = true dog = true editor = "vi" -global = false init.author.name = "Bar" json = false +location = "user" long = false ; "user" config from ~/.npmrc diff --git a/tap-snapshots/test/lib/load-all-commands.js.test.cjs b/tap-snapshots/test/lib/load-all-commands.js.test.cjs index 3575783a644b2..8cf2e2837e295 100644 --- a/tap-snapshots/test/lib/load-all-commands.js.test.cjs +++ b/tap-snapshots/test/lib/load-all-commands.js.test.cjs @@ -84,7 +84,7 @@ Usage: npm bugs [] Options: -[--browser|--browser ] [--registry ] +[--no-browser|--browser ] [--registry ] alias: issues @@ -120,7 +120,7 @@ Usage: npm ci Options: -[--ignore-scripts] [--script-shell ] +[--no-audit] [--ignore-scripts] [--script-shell ] aliases: clean-install, ic, install-clean, isntall-clean @@ -151,7 +151,8 @@ npm config list [--json] npm config edit Options: -[--json] [-g|--global] [--editor ] [-l|--long] +[--json] [-g|--global] [--editor ] [-L|--location ] +[-l|--long] alias: c @@ -167,9 +168,9 @@ Usage: npm dedupe Options: -[--global-style] [--legacy-bundling] [--strict-peer-deps] [--package-lock] +[--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] -[--audit] [--bin-links] [--fund] [--dry-run] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -239,7 +240,7 @@ Usage: npm docs [ [ ...]] Options: -[--browser|--browser ] [--registry ] +[--no-browser|--browser ] [--registry ] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -337,9 +338,9 @@ Usage: npm find-dupes Options: -[--global-style] [--legacy-bundling] [--strict-peer-deps] [--package-lock] +[--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] -[--audit] [--bin-links] [--fund] +[--no-audit] [--no-bin-links] [--no-fund] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -355,7 +356,7 @@ Usage: npm fund [[<@scope>/]] Options: -[--json] [--browser|--browser ] [--unicode] +[--json] [--no-browser|--browser ] [--unicode] [-w|--workspace [-w|--workspace ...]] [--which ] @@ -446,9 +447,9 @@ npm install / Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] -[--strict-peer-deps] [--package-lock] +[--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] -[--audit] 
[--bin-links] [--fund] [--dry-run] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -466,7 +467,7 @@ Usage: npm install-ci-test Options: -[--ignore-scripts] [--script-shell ] +[--no-audit] [--ignore-scripts] [--script-shell ] alias: cit @@ -493,9 +494,9 @@ npm install-test / Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] -[--strict-peer-deps] [--package-lock] +[--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] -[--audit] [--bin-links] [--fund] [--dry-run] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -516,9 +517,9 @@ npm link [<@scope>/][@] Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] -[--strict-peer-deps] [--package-lock] +[--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] -[--audit] [--bin-links] [--fund] [--dry-run] +[--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -678,6 +679,24 @@ Options: Run "npm help ping" for more info ` +exports[`test/lib/load-all-commands.js TAP load each command pkg > must match snapshot 1`] = ` +npm pkg + +Manages your package.json + +Usage: +npm pkg set = [= ...] +npm pkg get [ [ ...]] +npm pkg delete [ ...] + +Options: +[-f|--force] [--json] +[-w|--workspace [-w|--workspace ...]] +[-ws|--workspaces] + +Run "npm help pkg" for more info +` + exports[`test/lib/load-all-commands.js TAP load each command prefix > must match snapshot 1`] = ` npm prefix @@ -750,7 +769,7 @@ Usage: npm rebuild [[<@scope>/][@] ...] Options: -[-g|--global] [--bin-links] [--ignore-scripts] +[-g|--global] [--no-bin-links] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -768,7 +787,7 @@ Usage: npm repo [ [ ...]] Options: -[--browser|--browser ] +[--no-browser|--browser ] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -830,7 +849,7 @@ Usage: npm search [search terms ...] Options: -[-l|--long] [--json] [--color|--color ] [-p|--parseable] +[-l|--long] [--json] [--color|--no-color|--color always] [-p|--parseable] [--no-description] [--searchopts ] [--searchexclude ] [--registry ] [--prefer-online] [--prefer-offline] [--offline] @@ -1041,8 +1060,8 @@ npm update [...] 
Options: [-g|--global] [--global-style] [--legacy-bundling] [--strict-peer-deps] -[--package-lock] [--omit [--omit ...]] -[--ignore-scripts] [--audit] [--bin-links] [--fund] [--dry-run] +[--no-package-lock] [--omit [--omit ...]] +[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -1060,7 +1079,7 @@ Usage: npm version [ | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git] Options: -[--allow-same-version] [--commit-hooks] [--git-tag-version] [--json] +[--allow-same-version] [--no-commit-hooks] [--no-git-tag-version] [--json] [--preid prerelease-id] [--sign-git-tag] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] diff --git a/tap-snapshots/test/lib/utils/cmd-list.js.test.cjs b/tap-snapshots/test/lib/utils/cmd-list.js.test.cjs index 832f8560125a3..971580792048b 100644 --- a/tap-snapshots/test/lib/utils/cmd-list.js.test.cjs +++ b/tap-snapshots/test/lib/utils/cmd-list.js.test.cjs @@ -158,6 +158,7 @@ Object { "diff", "dist-tag", "ping", + "pkg", "test", "stop", "start", diff --git a/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs b/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs index 12df9ec89f6f7..01b137b8af54a 100644 --- a/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs +++ b/tap-snapshots/test/lib/utils/config/definitions.js.test.cjs @@ -81,6 +81,7 @@ Array [ "legacy-peer-deps", "link", "local-address", + "location", "loglevel", "logs-max", "long", @@ -218,9 +219,10 @@ exports[`test/lib/utils/config/definitions.js TAP > config description for audit * Default: true * Type: Boolean -When "true" submit audit reports alongside \`npm install\` runs to the default -registry and all registries configured for scopes. See the documentation for -[\`npm audit\`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [\`npm audit\`](/commands/npm-audit) for details on what is +submitted. ` exports[`test/lib/utils/config/definitions.js TAP > config description for audit-level 1`] = ` @@ -648,6 +650,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set \`--yes\` during \`npm init\`. +* Allow clobbering existing values in \`npm pkg\` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! @@ -949,6 +952,9 @@ exports[`test/lib/utils/config/definitions.js TAP > config description for json Whether or not to output JSON data, rather than the normal output. +* In \`npm pkg set\` it enables parsing set values with JSON.parse() before + saving them to your \`package.json\`. + Not supported by all npm commands. ` @@ -1019,6 +1025,16 @@ The IP address of the local interface to use when making connections to the npm registry. Must be IPv4 in versions of Node prior to 0.12. ` +exports[`test/lib/utils/config/definitions.js TAP > config description for location 1`] = ` +#### \`location\` + +* Default: "user" unless \`--global\` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" + +When passed to \`npm config\` this refers to which config file to use. 
+` + exports[`test/lib/utils/config/definitions.js TAP > config description for loglevel 1`] = ` #### \`loglevel\` diff --git a/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs b/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs index daa071b642e94..8487b45174cc3 100644 --- a/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs +++ b/tap-snapshots/test/lib/utils/config/describe-all.js.test.cjs @@ -49,9 +49,10 @@ to the same value as the current version. * Default: true * Type: Boolean -When "true" submit audit reports alongside \`npm install\` runs to the default -registry and all registries configured for scopes. See the documentation for -[\`npm audit\`](/commands/npm-audit) for details on what is submitted. +When "true" submit audit reports alongside the current npm command to the +default registry and all registries configured for scopes. See the +documentation for [\`npm audit\`](/commands/npm-audit) for details on what is +submitted. #### \`audit-level\` @@ -374,6 +375,7 @@ mistakes, unnecessary performance degradation, and malicious input. * Allow unpublishing all versions of a published package. * Allow conflicting peerDependencies to be installed in the root project. * Implicitly set \`--yes\` during \`npm init\`. +* Allow clobbering existing values in \`npm pkg\` If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option! @@ -573,6 +575,9 @@ number, if not already set in package.json. Whether or not to output JSON data, rather than the normal output. +* In \`npm pkg set\` it enables parsing set values with JSON.parse() before + saving them to your \`package.json\`. + Not supported by all npm commands. #### \`key\` @@ -632,6 +637,14 @@ Used with \`npm ls\`, limiting output to only those packages that are linked. The IP address of the local interface to use when making connections to the npm registry. Must be IPv4 in versions of Node prior to 0.12. +#### \`location\` + +* Default: "user" unless \`--global\` is passed, which will also set this value + to "global" +* Type: "global", "user", or "project" + +When passed to \`npm config\` this refers to which config file to use. + #### \`loglevel\` * Default: "notice" diff --git a/tap-snapshots/test/lib/utils/config/index.js.test.cjs b/tap-snapshots/test/lib/utils/config/index.js.test.cjs index 1e5ca232452e0..f1cba9264ee2f 100644 --- a/tap-snapshots/test/lib/utils/config/index.js.test.cjs +++ b/tap-snapshots/test/lib/utils/config/index.js.test.cjs @@ -64,6 +64,9 @@ Object { "l": Array [ "--long", ], + "L": Array [ + "--location", + ], "local": Array [ "--no-global", ], diff --git a/tap-snapshots/test/lib/utils/error-message.js.test.cjs b/tap-snapshots/test/lib/utils/error-message.js.test.cjs index 7b02dbd9aaa65..5b6e3c85ab112 100644 --- a/tap-snapshots/test/lib/utils/error-message.js.test.cjs +++ b/tap-snapshots/test/lib/utils/error-message.js.test.cjs @@ -1289,6 +1289,29 @@ Object { } ` +exports[`test/lib/utils/error-message.js TAP just simple messages > must match snapshot 23`] = ` +Object { + "detail": Array [ + Array [ + "network", + String( + This is a problem related to network connectivity. + In most cases you are behind a proxy or have bad network settings. + + If you are behind a proxy, please make sure that the + 'proxy' config is set properly. 
See: 'npm help config' + ), + ], + ], + "summary": Array [ + Array [ + "network", + "foo", + ], + ], +} +` + exports[`test/lib/utils/error-message.js TAP just simple messages > must match snapshot 3`] = ` Object { "detail": Array [ diff --git a/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs b/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs index 8cea8ee17e5ea..eb383c104a674 100644 --- a/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs +++ b/tap-snapshots/test/lib/utils/exit-handler.js.test.cjs @@ -6,15 +6,15 @@ */ 'use strict' exports[`test/lib/utils/exit-handler.js TAP handles unknown error > should have expected log contents for unknown error 1`] = ` -0 verbose stack Error: ERROR -1 verbose cwd {CWD} -2 verbose Foo 1.0.0 -3 verbose argv "/node" "{CWD}/test/lib/utils/exit-handler.js" -4 verbose node v1.0.0 -5 verbose npm v1.0.0 -6 error foo code ERROR -7 error foo ERR ERROR -8 error foo ERR ERROR -9 verbose exit 1 +24 verbose stack Error: ERROR +25 verbose cwd {CWD} +26 verbose Foo 1.0.0 +27 verbose argv "/node" "{CWD}/test/lib/utils/exit-handler.js" +28 verbose node v1.0.0 +29 verbose npm v1.0.0 +30 error code ERROR +31 error ERR ERROR +32 error ERR ERROR +33 verbose exit 1 ` diff --git a/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs b/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs index 3987f6a732da5..50f6481f6e848 100644 --- a/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs +++ b/tap-snapshots/test/lib/utils/npm-usage.js.test.cjs @@ -26,10 +26,10 @@ All commands: edit, exec, explain, explore, find-dupes, fund, get, help, hook, init, install, install-ci-test, install-test, link, ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami Specify configs in the ini-formatted file: /some/config/file/.npmrc @@ -62,10 +62,10 @@ All commands: edit, exec, explain, explore, find-dupes, fund, get, help, hook, init, install, install-ci-test, install-test, link, ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami Specify configs in the ini-formatted file: /some/config/file/.npmrc @@ -98,10 +98,10 @@ All commands: edit, exec, explain, explore, find-dupes, fund, get, help, hook, init, install, install-ci-test, install-test, link, ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + 
uninstall, unpublish, unstar, update, version, view, whoami Specify configs in the ini-formatted file: /some/config/file/.npmrc @@ -134,10 +134,10 @@ All commands: edit, exec, explain, explore, find-dupes, fund, get, help, hook, init, install, install-ci-test, install-test, link, ll, login, logout, ls, org, outdated, owner, pack, ping, - prefix, profile, prune, publish, rebuild, repo, restart, - root, run-script, search, set, set-script, shrinkwrap, star, - stars, start, stop, team, test, token, uninstall, unpublish, - unstar, update, version, view, whoami + pkg, prefix, profile, prune, publish, rebuild, repo, + restart, root, run-script, search, set, set-script, + shrinkwrap, star, stars, start, stop, team, test, token, + uninstall, unpublish, unstar, update, version, view, whoami Specify configs in the ini-formatted file: /some/config/file/.npmrc @@ -235,7 +235,7 @@ All commands: npm bugs [] Options: - [--browser|--browser ] [--registry ] + [--no-browser|--browser ] [--registry ] alias: issues @@ -267,7 +267,7 @@ All commands: npm ci Options: - [--ignore-scripts] [--script-shell ] + [--no-audit] [--ignore-scripts] [--script-shell ] aliases: clean-install, ic, install-clean, isntall-clean @@ -294,7 +294,8 @@ All commands: npm config edit Options: - [--json] [-g|--global] [--editor ] [-l|--long] + [--json] [-g|--global] [--editor ] [-L|--location ] + [-l|--long] alias: c @@ -308,9 +309,9 @@ All commands: npm dedupe Options: - [--global-style] [--legacy-bundling] [--strict-peer-deps] [--package-lock] + [--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] - [--audit] [--bin-links] [--fund] [--dry-run] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -372,7 +373,7 @@ All commands: npm docs [ [ ...]] Options: - [--browser|--browser ] [--registry ] + [--no-browser|--browser ] [--registry ] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -458,9 +459,9 @@ All commands: npm find-dupes Options: - [--global-style] [--legacy-bundling] [--strict-peer-deps] [--package-lock] + [--global-style] [--legacy-bundling] [--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] - [--audit] [--bin-links] [--fund] + [--no-audit] [--no-bin-links] [--no-fund] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -474,7 +475,7 @@ All commands: npm fund [[<@scope>/]] Options: - [--json] [--browser|--browser ] [--unicode] + [--json] [--no-browser|--browser ] [--unicode] [-w|--workspace [-w|--workspace ...]] [--which ] @@ -555,9 +556,9 @@ All commands: Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] - [--strict-peer-deps] [--package-lock] + [--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] - [--audit] [--bin-links] [--fund] [--dry-run] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -573,7 +574,7 @@ All commands: npm install-ci-test Options: - [--ignore-scripts] [--script-shell ] + [--no-audit] [--ignore-scripts] [--script-shell ] alias: cit @@ -598,9 +599,9 @@ All commands: Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] - [--strict-peer-deps] [--package-lock] + [--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] - 
[--audit] [--bin-links] [--fund] [--dry-run] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -619,9 +620,9 @@ All commands: Options: [-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer] [-E|--save-exact] [-g|--global] [--global-style] [--legacy-bundling] - [--strict-peer-deps] [--package-lock] + [--strict-peer-deps] [--no-package-lock] [--omit [--omit ...]] [--ignore-scripts] - [--audit] [--bin-links] [--fund] [--dry-run] + [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -762,6 +763,22 @@ All commands: Run "npm help ping" for more info + pkg npm pkg + + Manages your package.json + + Usage: + npm pkg set = [= ...] + npm pkg get [ [ ...]] + npm pkg delete [ ...] + + Options: + [-f|--force] [--json] + [-w|--workspace [-w|--workspace ...]] + [-ws|--workspaces] + + Run "npm help pkg" for more info + prefix npm prefix Display prefix @@ -825,7 +842,7 @@ All commands: npm rebuild [[<@scope>/][@] ...] Options: - [-g|--global] [--bin-links] [--ignore-scripts] + [-g|--global] [--no-bin-links] [--ignore-scripts] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -841,7 +858,7 @@ All commands: npm repo [ [ ...]] Options: - [--browser|--browser ] + [--no-browser|--browser ] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -895,7 +912,7 @@ All commands: npm search [search terms ...] Options: - [-l|--long] [--json] [--color|--color ] [-p|--parseable] + [-l|--long] [--json] [--color|--no-color|--color always] [-p|--parseable] [--no-description] [--searchopts ] [--searchexclude ] [--registry ] [--prefer-online] [--prefer-offline] [--offline] @@ -1078,8 +1095,8 @@ All commands: Options: [-g|--global] [--global-style] [--legacy-bundling] [--strict-peer-deps] - [--package-lock] [--omit [--omit ...]] - [--ignore-scripts] [--audit] [--bin-links] [--fund] [--dry-run] + [--no-package-lock] [--omit [--omit ...]] + [--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] @@ -1095,7 +1112,7 @@ All commands: npm version [ | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git] Options: - [--allow-same-version] [--commit-hooks] [--git-tag-version] [--json] + [--allow-same-version] [--no-commit-hooks] [--no-git-tag-version] [--json] [--preid prerelease-id] [--sign-git-tag] [-w|--workspace [-w|--workspace ...]] [-ws|--workspaces] diff --git a/tap-snapshots/test/lib/utils/update-notifier.js.test.cjs b/tap-snapshots/test/lib/utils/update-notifier.js.test.cjs index 91228650d47de..157390997d793 100644 --- a/tap-snapshots/test/lib/utils/update-notifier.js.test.cjs +++ b/tap-snapshots/test/lib/utils/update-notifier.js.test.cjs @@ -6,11 +6,11 @@ */ 'use strict' exports[`test/lib/utils/update-notifier.js TAP notification situations major to current > color 1`] = ` - -New major version of npm available! 122.420.69 -> 123.420.69 -Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 -Run npm install -g npm@123.420.69 to update! - + +New major version of npm available! 122.420.69 -> 123.420.69 +Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 +Run npm install -g npm@123.420.69 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations major to current > no color 1`] = ` @@ -22,11 +22,11 @@ Run \`npm install -g npm@123.420.69\` to update! 
` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to current > color 1`] = ` - -New minor version of npm available! 123.419.69 -> 123.420.69 -Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 -Run npm install -g npm@123.420.69 to update! - + +New minor version of npm available! 123.419.69 -> 123.420.69 +Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 +Run npm install -g npm@123.420.69 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to current > no color 1`] = ` @@ -38,11 +38,11 @@ Run \`npm install -g npm@123.420.69\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to next version > color 1`] = ` - -New minor version of npm available! 123.420.70 -> 123.421.70 -Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 -Run npm install -g npm@123.421.70 to update! - + +New minor version of npm available! 123.420.70 -> 123.421.70 +Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 +Run npm install -g npm@123.421.70 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations minor to next version > no color 1`] = ` @@ -54,11 +54,11 @@ Run \`npm install -g npm@123.421.70\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations new beta available > color 1`] = ` - -New prerelease version of npm available! 124.0.0-beta.0 -> 124.0.0-beta.99999 -Changelog: https://github.com/npm/cli/releases/tag/v124.0.0-beta.99999 -Run npm install -g npm@124.0.0-beta.99999 to update! - + +New prerelease version of npm available! 124.0.0-beta.0 -> 124.0.0-beta.99999 +Changelog: https://github.com/npm/cli/releases/tag/v124.0.0-beta.99999 +Run npm install -g npm@124.0.0-beta.99999 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations new beta available > no color 1`] = ` @@ -70,11 +70,11 @@ Run \`npm install -g npm@124.0.0-beta.99999\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to current > color 1`] = ` - -New patch version of npm available! 123.420.68 -> 123.420.69 -Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 -Run npm install -g npm@123.420.69 to update! - + +New patch version of npm available! 123.420.68 -> 123.420.69 +Changelog: https://github.com/npm/cli/releases/tag/v123.420.69 +Run npm install -g npm@123.420.69 to update! + ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to current > no color 1`] = ` @@ -86,11 +86,11 @@ Run \`npm install -g npm@123.420.69\` to update! ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to next version > color 1`] = ` - -New patch version of npm available! 123.421.69 -> 123.421.70 -Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 -Run npm install -g npm@123.421.70 to update! - + +New patch version of npm available! 123.421.69 -> 123.421.70 +Changelog: https://github.com/npm/cli/releases/tag/v123.421.70 +Run npm install -g npm@123.421.70 to update! 
+ ` exports[`test/lib/utils/update-notifier.js TAP notification situations patch to next version > no color 1`] = ` diff --git a/test/fixtures/mock-npm.js b/test/fixtures/mock-npm.js index 1de080eb10b4a..e3be10b4b9aa3 100644 --- a/test/fixtures/mock-npm.js +++ b/test/fixtures/mock-npm.js @@ -1,20 +1,19 @@ const npmlog = require('npmlog') -const perf = require('../../lib/utils/perf.js') -perf.reset() const procLog = require('../../lib/utils/proc-log-listener.js') procLog.reset() const realLog = {} -for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error']) +for (const level in npmlog.levels) realLog[level] = npmlog[level] const { title, execPath } = process const RealMockNpm = (t, otherMocks = {}) => { t.teardown(() => { - for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error']) + npm.perfStop() + npmlog.record.length = 0 + for (const level in npmlog.levels) npmlog[level] = realLog[level] - perf.reset() procLog.reset() process.title = title process.execPath = execPath @@ -33,9 +32,14 @@ const RealMockNpm = (t, otherMocks = {}) => { }) }) } - for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error']) { + for (const level in npmlog.levels) { npmlog[level] = (...msg) => { logs.push([level, ...msg]) + + const l = npmlog.level + npmlog.level = 'silent' + realLog[level](...msg) + npmlog.level = l } } npm.output = (...msg) => outputs.push(msg) diff --git a/test/lib/config.js b/test/lib/config.js index 6c04293137af9..8a1e7d85e09aa 100644 --- a/test/lib/config.js +++ b/test/lib/config.js @@ -47,8 +47,8 @@ const defaults = { const cliConfig = { editor: 'vi', json: false, + location: 'user', long: false, - global: false, cat: true, chai: true, dog: true, @@ -198,8 +198,8 @@ t.test('config list --json', t => { { editor: 'vi', json: true, + location: 'user', long: false, - global: false, cat: true, chai: true, dog: true, @@ -265,7 +265,7 @@ t.test('config delete multiple key', t => { }) }) -t.test('config delete key --global', t => { +t.test('config delete key --location=global', t => { t.plan(4) npm.config.delete = (key, where) => { @@ -277,13 +277,13 @@ t.test('config delete key --global', t => { t.equal(where, 'global', 'should save global config post-delete') } - cliConfig.global = true + cliConfig.location = 'global' config.exec(['delete', 'foo'], (err) => { - t.error(err, 'npm config delete key --global') + t.error(err, 'npm config delete key --location=global') }) t.teardown(() => { - cliConfig.global = false + cliConfig.location = 'user' delete npm.config.delete delete npm.config.save }) @@ -419,7 +419,7 @@ t.test('config set invalid key', t => { }) }) -t.test('config set key --global', t => { +t.test('config set key --location=global', t => { t.plan(5) npm.config.set = (key, val, where) => { @@ -432,13 +432,13 @@ t.test('config set key --global', t => { t.equal(where, 'global', 'should save global config') } - cliConfig.global = true + cliConfig.location = 'global' config.exec(['set', 'foo', 'bar'], (err) => { - t.error(err, 'npm config set key --global') + t.error(err, 'npm config set key --location=global') }) t.teardown(() => { - cliConfig.global = false + cliConfig.location = 'user' delete npm.config.set delete npm.config.save }) @@ -583,10 +583,10 @@ sign-git-commit=true` }) }) -t.test('config edit --global', t => { +t.test('config edit --location=global', t => { t.plan(6) - cliConfig.global = true + cliConfig.location = 'global' const npmrc = 'init.author.name=Foo' npm.config.data.set('global', { source: '/etc/npmrc', @@ -626,7 
+626,7 @@ t.test('config edit --global', t => { }) t.teardown(() => { - cliConfig.global = false + cliConfig.location = 'user' npm.config.data.delete('user') delete npm.config.save }) diff --git a/test/lib/link.js b/test/lib/link.js index 736d18cab9906..96f689892ff83 100644 --- a/test/lib/link.js +++ b/test/lib/link.js @@ -30,7 +30,7 @@ const printLinks = async (opts) => { const linkedItems = [...tree.inventory.values()] .sort((a, b) => a.pkgid.localeCompare(b.pkgid, 'en')) for (const item of linkedItems) { - if (item.target) + if (item.isLink) res += `${item.path} -> ${item.target.path}\n` } return res diff --git a/test/lib/npm.js b/test/lib/npm.js index 291a58955ceed..03bb46d8d8451 100644 --- a/test/lib/npm.js +++ b/test/lib/npm.js @@ -476,3 +476,28 @@ t.test('set process.title', t => { t.end() }) + +t.test('timings', t => { + const { npm, logs } = mockNpm(t) + process.emit('time', 'foo') + process.emit('time', 'bar') + t.match(npm.timers.get('foo'), Number, 'foo timer is a number') + t.match(npm.timers.get('bar'), Number, 'foo timer is a number') + process.emit('timeEnd', 'foo') + process.emit('timeEnd', 'bar') + process.emit('timeEnd', 'baz') + t.match(logs, [ + ['timing', 'foo', /Completed in [0-9]+ms/], + ['timing', 'bar', /Completed in [0-9]+ms/], + [ + 'silly', + 'timing', + "Tried to end timer that doesn't exist:", + 'baz', + ], + ]) + t.notOk(npm.timers.has('foo'), 'foo timer is gone') + t.notOk(npm.timers.has('bar'), 'bar timer is gone') + t.match(npm.timings, { foo: Number, bar: Number }) + t.end() +}) diff --git a/test/lib/pkg.js b/test/lib/pkg.js new file mode 100644 index 0000000000000..688df6859054a --- /dev/null +++ b/test/lib/pkg.js @@ -0,0 +1,737 @@ +const { resolve } = require('path') +const { readFileSync } = require('fs') +const t = require('tap') +const { fake: mockNpm } = require('../fixtures/mock-npm') + +const redactCwd = (path) => { + const normalizePath = p => p + .replace(/\\+/g, '/') + .replace(/\r\n/g, '\n') + return normalizePath(path) + .replace(new RegExp(normalizePath(process.cwd()), 'g'), '{CWD}') +} + +t.cleanSnapshot = (str) => redactCwd(str) + +let OUTPUT = '' +const config = { + global: false, + force: false, + 'pkg-cast': 'string', +} +const npm = mockNpm({ + localPrefix: t.testdirName, + config, + output: (str) => { + OUTPUT += str + }, +}) + +const Pkg = require('../../lib/pkg.js') +const pkg = new Pkg(npm) + +const readPackageJson = (path) => { + path = path || npm.localPrefix + return JSON.parse(readFileSync(resolve(path, 'package.json'), 'utf8')) +} + +t.afterEach(() => { + config.global = false + config.json = false + npm.localPrefix = t.testdirName + OUTPUT = '' +}) + +t.test('no args', t => { + pkg.exec([], err => { + t.match( + err, + { code: 'EUSAGE' }, + 'should throw usage error' + ) + t.end() + }) +}) + +t.test('no global mode', t => { + config.global = true + pkg.exec(['get', 'foo'], err => { + t.match( + err, + { code: 'EPKGGLOBAL' }, + 'should throw no global mode error' + ) + t.end() + }) +}) + +t.test('get no args', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }) + + pkg.exec(['get'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + { + name: 'foo', + version: '1.1.1', + }, + 'should print package.json content' + ) + t.end() + }) +}) + +t.test('get single arg', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }) + + pkg.exec(['get', 'version'], err => { + if (err) + 
throw err + + t.strictSame( + JSON.parse(OUTPUT), + '1.1.1', + 'should print retrieved package.json field' + ) + t.end() + }) +}) + +t.test('get nested arg', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + scripts: { + test: 'node test.js', + }, + }), + }) + + pkg.exec(['get', 'scripts.test'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + 'node test.js', + 'should print retrieved nested field' + ) + t.end() + }) +}) + +t.test('get array field', t => { + const files = [ + 'index.js', + 'cli.js', + ] + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + files, + }), + }) + + pkg.exec(['get', 'files'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + files, + 'should print retrieved array field' + ) + t.end() + }) +}) + +t.test('get array item', t => { + const files = [ + 'index.js', + 'cli.js', + ] + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + files, + }), + }) + + pkg.exec(['get', 'files[0]'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + 'index.js', + 'should print retrieved array field' + ) + t.end() + }) +}) + +t.test('get array nested items notation', t => { + const contributors = [ + { + name: 'Ruy', + url: 'http://example.com/ruy', + }, + { + name: 'Gar', + url: 'http://example.com/gar', + }, + ] + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + contributors, + }), + }) + + pkg.exec(['get', 'contributors.name'], err => { + if (err) + throw err + + t.strictSame( + JSON.parse(OUTPUT), + { + 'contributors[0].name': 'Ruy', + 'contributors[1].name': 'Gar', + }, + 'should print json result containing matching results' + ) + t.end() + }) +}) + +t.test('set no args', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['set'], err => { + t.match( + err, + { code: 'EPKGSET' }, + 'should throw an error if no args' + ) + + t.end() + }) +}) + +t.test('set missing value', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['set', 'key='], err => { + t.match( + err, + { code: 'EPKGSET' }, + 'should throw an error if missing value' + ) + + t.end() + }) +}) + +t.test('set missing key', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['set', '=value'], err => { + t.match( + err, + { code: 'EPKGSET' }, + 'should throw an error if missing key' + ) + + t.end() + }) +}) + +t.test('set single field', t => { + const json = { + name: 'foo', + version: '1.1.1', + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'description=Awesome stuff'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + ...json, + description: 'Awesome stuff', + }, + 'should add single field to package.json' + ) + t.end() + }) +}) + +t.test('push to array syntax', t => { + const json = { + name: 'foo', + version: '1.1.1', + keywords: [ + 'foo', + ], + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'keywords[]=bar', 'keywords[]=baz'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + ...json, + keywords: [ + 'foo', + 'bar', + 'baz', + ], + }, + 'should append to arrays using empty bracket syntax' + ) + t.end() + }) +}) + 
+t.test('set multiple fields', t => { + const json = { + name: 'foo', + version: '1.1.1', + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'bin.foo=foo.js', 'scripts.test=node test.js'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + ...json, + bin: { + foo: 'foo.js', + }, + scripts: { + test: 'node test.js', + }, + }, + 'should add single field to package.json' + ) + t.end() + }) +}) + +t.test('set = separate value', t => { + const json = { + name: 'foo', + version: '1.1.1', + } + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify(json), + }) + + pkg.exec(['set', 'tap[test-env][0]=LC_ALL=sk'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + ...json, + tap: { + 'test-env': [ + 'LC_ALL=sk', + ], + }, + }, + 'should add single field to package.json' + ) + t.end() + }) +}) + +t.test('set --json', async t => { + config.json = true + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.1.1', + }), + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'private=true'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + }, + 'should add boolean field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'tap.timeout=60'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + tap: { + timeout: 60, + }, + }, + 'should add number field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'foo={ "bar": { "baz": "BAZ" } }'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + tap: { + timeout: 60, + }, + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add object field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'workspaces=["packages/*"]'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + private: true, + workspaces: [ + 'packages/*', + ], + tap: { + timeout: 60, + }, + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add object field to package.json' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.exec(['set', 'description="awesome"'], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.1.1', + description: 'awesome', + private: true, + workspaces: [ + 'packages/*', + ], + tap: { + timeout: 60, + }, + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add object field to package.json' + ) + res() + }) + }) +}) + +t.test('delete no args', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['delete'], err => { + t.match( + err, + { code: 'EPKGDELETE' }, + 'should throw an error if deleting no args' + ) + + t.end() + }) +}) + +t.test('delete invalid key', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ name: 'foo' }), + }) + pkg.exec(['delete', ''], err => { + t.match( + err, + { code: 'EPKGDELETE' }, + 'should throw an error if deleting invalid args' + ) + + t.end() + }) +}) + +t.test('delete single field', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + }), + }) + pkg.exec(['delete', 
'version'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + name: 'foo', + }, + 'should delete single field from package.json' + ) + + t.end() + }) +}) + +t.test('delete multiple field', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + description: 'awesome', + }), + }) + pkg.exec(['delete', 'version', 'description'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + name: 'foo', + }, + 'should delete multiple fields from package.json' + ) + + t.end() + }) +}) + +t.test('delete nested field', t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'foo', + version: '1.0.0', + info: { + foo: { + bar: [ + { + baz: 'deleteme', + }, + ], + }, + }, + }), + }) + pkg.exec(['delete', 'info.foo.bar[0].baz'], err => { + if (err) + throw err + + t.strictSame( + readPackageJson(), + { + name: 'foo', + version: '1.0.0', + info: { + foo: { + bar: [ + {}, + ], + }, + }, + }, + 'should delete nested fields from package.json' + ) + + t.end() + }) +}) + +t.test('workspaces', async t => { + npm.localPrefix = t.testdir({ + 'package.json': JSON.stringify({ + name: 'root', + version: '1.0.0', + workspaces: [ + 'packages/*', + ], + }), + packages: { + a: { + 'package.json': JSON.stringify({ + name: 'a', + version: '1.0.0', + }), + }, + b: { + 'package.json': JSON.stringify({ + name: 'b', + version: '1.2.3', + }), + }, + }, + }) + + await new Promise((res, rej) => { + pkg.execWorkspaces(['get', 'name', 'version'], [], err => { + if (err) + rej(err) + + t.strictSame( + JSON.parse(OUTPUT), + { + a: { + name: 'a', + version: '1.0.0', + }, + b: { + name: 'b', + version: '1.2.3', + }, + }, + 'should return expected result for configured workspaces' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.execWorkspaces(['set', 'funding=http://example.com'], [], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/a')), + { + name: 'a', + version: '1.0.0', + funding: 'http://example.com', + }, + 'should add field to workspace a' + ) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/b')), + { + name: 'b', + version: '1.2.3', + funding: 'http://example.com', + }, + 'should add field to workspace b' + ) + res() + }) + }) + + await new Promise((res, rej) => { + pkg.execWorkspaces(['delete', 'version'], [], err => { + if (err) + rej(err) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/a')), + { + name: 'a', + funding: 'http://example.com', + }, + 'should delete version field from workspace a' + ) + + t.strictSame( + readPackageJson(resolve(npm.localPrefix, 'packages/b')), + { + name: 'b', + funding: 'http://example.com', + }, + 'should delete version field from workspace b' + ) + res() + }) + }) +}) diff --git a/test/lib/publish.js b/test/lib/publish.js index 56590478fc1ae..4aa3e5592751e 100644 --- a/test/lib/publish.js +++ b/test/lib/publish.js @@ -762,3 +762,103 @@ t.test('private workspaces', (t) => { t.end() }) + +t.test('runs correct lifecycle scripts', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + scripts: { + prepublishOnly: 'echo test prepublishOnly', + prepublish: 'echo test prepublish', // should NOT run this one + publish: 'echo test publish', + postpublish: 'echo test postpublish', + }, + }, null, 2), + }) + + const scripts = [] + const Publish = t.mock('../../lib/publish.js', { + 
'@npmcli/run-script': (args) => { + scripts.push(args) + }, + '../../lib/utils/tar.js': { + getContents: () => ({ + id: 'someid', + }), + logTar: () => { + t.pass('logTar is called') + }, + }, + libnpmpublish: { + publish: () => { + t.pass('publish called') + }, + }, + }) + const npm = mockNpm({ + output: () => { + t.pass('output is called') + }, + }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) + publish.exec([testDir], (er) => { + if (er) + throw er + t.same( + scripts.map(s => s.event), + ['prepublishOnly', 'publish', 'postpublish'], + 'runs only expected scripts, in order' + ) + t.end() + }) +}) + +t.test('does not run scripts on --ignore-scripts', t => { + const testDir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'my-cool-pkg', + version: '1.0.0', + }, null, 2), + }) + + const Publish = t.mock('../../lib/publish.js', { + '@npmcli/run-script': () => { + t.fail('should not call run-script') + }, + '../../lib/utils/tar.js': { + getContents: () => ({ + id: 'someid', + }), + logTar: () => { + t.pass('logTar is called') + }, + }, + libnpmpublish: { + publish: () => { + t.pass('publish called') + }, + }, + }) + const npm = mockNpm({ + config: { 'ignore-scripts': true }, + output: () => { + t.pass('output is called') + }, + }) + npm.config.getCredentialsByURI = (uri) => { + t.same(uri, npm.config.get('registry'), 'gets credentials for expected registry') + return { token: 'some.registry.token' } + } + const publish = new Publish(npm) + publish.exec([testDir], (er) => { + if (er) + throw er + t.pass('got to callback') + t.end() + }) +}) diff --git a/test/lib/utils/config/definition.js b/test/lib/utils/config/definition.js index 45f4c977a77a0..a17a1a09a2240 100644 --- a/test/lib/utils/config/definition.js +++ b/test/lib/utils/config/definition.js @@ -95,6 +95,13 @@ t.test('basic definition', async t => { description: 'test description', }) t.equal(hasShort.usage, '-t|--key ') + const multiHasShort = new Definition('key', { + default: 'test default', + short: 't', + type: [null, String], + description: 'test description', + }) + t.equal(multiHasShort.usage, '-t|--key ') const hardCodedTypes = new Definition('key', { default: 'test default', type: ['string1', 'string2'], diff --git a/test/lib/utils/config/definitions.js b/test/lib/utils/config/definitions.js index 8724f0e3bd3eb..63d9bbd195ab2 100644 --- a/test/lib/utils/config/definitions.js +++ b/test/lib/utils/config/definitions.js @@ -463,7 +463,7 @@ t.test('search options', t => { t.end() }) -t.test('noProxy', t => { +t.test('noProxy - array', t => { const obj = { noproxy: ['1.2.3.4,2.3.4.5', '3.4.5.6'] } const flat = {} definitions.noproxy.flatten('noproxy', obj, flat) @@ -471,6 +471,14 @@ t.test('noProxy', t => { t.end() }) +t.test('noProxy - string', t => { + const obj = { noproxy: '1.2.3.4,2.3.4.5,3.4.5.6' } + const flat = {} + definitions.noproxy.flatten('noproxy', obj, flat) + t.strictSame(flat, { noProxy: '1.2.3.4,2.3.4.5,3.4.5.6' }) + t.end() +}) + t.test('maxSockets', t => { const obj = { maxsockets: 123 } const flat = {} @@ -797,3 +805,26 @@ t.test('save-exact', t => { t.strictSame(flat, { savePrefix: '~1.2.3' }) t.end() }) + +t.test('location', t => { + const obj = { + global: true, + location: 'user', + } + const flat = {} + definitions.location.flatten('location', obj, flat) + // global = true sets location in both places to global + t.strictSame(flat, { 
location: 'global' }) + t.strictSame(obj, { global: true, location: 'global' }) + + obj.global = false + obj.location = 'user' + delete flat.global + delete flat.location + + definitions.location.flatten('location', obj, flat) + // global = false leaves location unaltered + t.strictSame(flat, { location: 'user' }) + t.strictSame(obj, { global: false, location: 'user' }) + t.end() +}) diff --git a/test/lib/utils/error-message.js b/test/lib/utils/error-message.js index 4f94645a4542d..3fdfb8cc25089 100644 --- a/test/lib/utils/error-message.js +++ b/test/lib/utils/error-message.js @@ -97,6 +97,7 @@ t.test('just simple messages', t => { 'ETOOMANYARGS', 'ETARGET', 'E403', + 'ERR_SOCKET_TIMEOUT', ] t.plan(codes.length) codes.forEach(code => { diff --git a/test/lib/utils/exit-handler.js b/test/lib/utils/exit-handler.js index 06014b67a9754..981ac9a32b684 100644 --- a/test/lib/utils/exit-handler.js +++ b/test/lib/utils/exit-handler.js @@ -1,13 +1,12 @@ /* eslint-disable no-extend-native */ /* eslint-disable no-global-assign */ -const EventEmitter = require('events') -const writeFileAtomic = require('write-file-atomic') const t = require('tap') +const EventEmitter = require('events') +const os = require('os') +const fs = require('fs') +const path = require('path') -// NOTE: Although these unit tests may look like the rest on the surface, -// they are in fact very special due to the amount of things hooking directly -// to global process and variables defined in the module scope. That makes -// for tests that are very interdependent and their order are important. +const { real: mockNpm } = require('../../fixtures/mock-npm') // generic error to be used in tests const err = Object.assign(new Error('ERROR'), { code: 'ERROR' }) @@ -23,69 +22,23 @@ const redactCwd = (path) => { t.cleanSnapshot = (str) => redactCwd(str) -// internal modules mocks const cacheFolder = t.testdir({}) -const config = { - values: { - cache: cacheFolder, - timing: true, - }, - loaded: true, - updateNotification: null, - get (key) { - return this.values[key] - }, -} - -const npm = { - version: '1.0.0', - config, - shelloutCommands: ['exec', 'run-script'], -} +const logFile = path.resolve(cacheFolder, '_logs', 'expecteddate-debug.log') +const timingFile = path.resolve(cacheFolder, '_timing.json') -const npmlog = { - disableProgress: () => null, - log (level, ...args) { - this.record.push({ - id: this.record.length, - level, - message: args.reduce((res, i) => `${res} ${i.message ? i.message : i}`, ''), - prefix: level !== 'verbose' ? 
'foo' : '', - }) - }, - error (...args) { - this.log('error', ...args) - }, - info (...args) { - this.log('info', ...args) - }, - level: 'silly', - levels: { - silly: 0, - verbose: 1, - info: 2, - error: 3, - silent: 4, - }, - notice (...args) { - this.log('notice', ...args) - }, - record: [], - verbose (...args) { - this.log('verbose', ...args) - }, -} +const { npm } = mockNpm(t) -// overrides OS type/release for cross platform snapshots -const os = require('os') -os.type = () => 'Foo' -os.release = () => '1.0.0' +t.before(async () => { + npm.version = '1.0.0' + await npm.load() + npm.config.set('cache', cacheFolder) +}) -// bootstrap tap before cutting off process ref -t.test('ok', (t) => { +t.test('bootstrap tap before cutting off process ref', (t) => { t.ok('ok') t.end() }) + // cut off process from script so that it won't quit the test runner // while trying to run through the myriad of cases const _process = process @@ -95,9 +48,11 @@ process = Object.assign( argv: ['/node', ..._process.argv.slice(1)], cwd: _process.cwd, env: _process.env, - exit () {}, - exitCode: 0, version: 'v1.0.0', + exit: (code) => { + process.exitCode = code || process.exitCode || 0 + process.emit('exit', process.exitCode) + }, stdout: { write (_, cb) { cb() } }, @@ -105,19 +60,40 @@ process = Object.assign( hrtime: _process.hrtime, } ) -// needs to put process back in its place -// in order for tap to exit properly + +const osType = os.type +const osRelease = os.release +// overrides OS type/release for cross platform snapshots +os.type = () => 'Foo' +os.release = () => '1.0.0' + +// generates logfile name with mocked date +const _toISOString = Date.prototype.toISOString +Date.prototype.toISOString = () => 'expecteddate' + +const consoleError = console.error +const errors = [] +console.error = (err) => { + errors.push(err) +} t.teardown(() => { + os.type = osType + os.release = osRelease + // needs to put process back in its place in order for tap to exit properly process = _process + Date.prototype.toISOString = _toISOString + console.error = consoleError }) t.afterEach(() => { + errors.length = 0 + npm.log.level = 'silent' // clear out the 'A complete log' message - npmlog.record.length = 0 + npm.log.record.length = 0 + delete process.exitCode }) const mocks = { - npmlog, '../../../lib/utils/error-message.js': (err) => ({ ...err, summary: [['ERR', err.message]], @@ -125,102 +101,57 @@ const mocks = { }), } -let exitHandler = t.mock('../../../lib/utils/exit-handler.js', mocks) +const exitHandler = t.mock('../../../lib/utils/exit-handler.js', mocks) exitHandler.setNpm(npm) -t.test('default exit code', (t) => { - t.plan(1) - - // manually simulate timing handlers - process.emit('timing', 'foo', 1) - process.emit('timing', 'foo', 2) - - // generates logfile name with mocked date - const _toISOString = Date.prototype.toISOString - Date.prototype.toISOString = () => 'expecteddate' - - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should default to error code 1') - } - - // skip console.error logs - const _error = console.error - console.error = () => null - +t.test('exit handler never called - loglevel silent', (t) => { + npm.log.level = 'silent' process.emit('exit', 1) + const logData = fs.readFileSync(logFile, 'utf8') + t.match(logData, 'Exit handler never called!') + t.match(errors, [''], 'logs one empty string to console.error') + t.end() +}) - t.teardown(() => { - npmlog.level = 'silly' - process.exit = _exit - console.error = _error - 
Date.prototype.toISOString = _toISOString - }) +t.test('exit handler never called - loglevel notice', (t) => { + npm.log.level = 'notice' + process.emit('exit', 1) + const logData = fs.readFileSync(logFile, 'utf8') + t.match(logData, 'Exit handler never called!') + t.match(errors, ['', ''], 'logs two empty strings to console.error') + t.end() }) t.test('handles unknown error', (t) => { t.plan(2) - const _toISOString = Date.prototype.toISOString - Date.prototype.toISOString = () => 'expecteddate' - - const sync = writeFileAtomic.sync - writeFileAtomic.sync = (filename, content) => { - t.equal( - redactCwd(filename), - '{CWD}/test/lib/utils/tap-testdir-exit-handler/_logs/expecteddate-debug.log', - 'should use expected log filename' - ) - t.matchSnapshot( - content, - 'should have expected log contents for unknown error' - ) - } - - exitHandler(err) + npm.log.level = 'notice' - t.teardown(() => { - writeFileAtomic.sync = sync - Date.prototype.toISOString = _toISOString + process.once('timeEnd', (msg) => { + t.equal(msg, 'npm', 'should trigger timeEnd for npm') }) + + exitHandler(err) + const logData = fs.readFileSync(logFile, 'utf8') + t.matchSnapshot( + logData, + 'should have expected log contents for unknown error' + ) t.end() }) -t.test('npm.config not ready', (t) => { +t.test('fail to write logfile', (t) => { t.plan(1) - config.loaded = false - - const _error = console.error - console.error = (msg) => { - t.match( - msg, - /Error: Exit prior to config file resolving./, - 'should exit with config error msg' - ) - } - - exitHandler() - t.teardown(() => { - console.error = _error - config.loaded = true + npm.config.set('cache', cacheFolder) }) -}) - -t.test('fail to write logfile', (t) => { - t.plan(1) const badDir = t.testdir({ _logs: 'is a file', }) - config.values.cache = badDir - - t.teardown(() => { - config.values.cache = cacheFolder - }) + npm.config.set('cache', badDir) t.doesNotThrow( () => exitHandler(err), @@ -231,241 +162,166 @@ t.test('fail to write logfile', (t) => { t.test('console.log output using --json', (t) => { t.plan(1) - config.values.json = true - - const _error = console.error - console.error = (jsonOutput) => { - t.same( - JSON.parse(jsonOutput), - { - error: { - code: 'EBADTHING', // should default error code to E[A-Z]+ - summary: 'Error: EBADTHING Something happened', - detail: 'Error: EBADTHING Something happened', - }, - }, - 'should output expected json output' - ) - } - - exitHandler(new Error('Error: EBADTHING Something happened')) - + npm.config.set('json', true) t.teardown(() => { - console.error = _error - delete config.values.json + npm.config.set('json', false) }) + + exitHandler(new Error('Error: EBADTHING Something happened')) + t.same( + JSON.parse(errors[0]), + { + error: { + code: 'EBADTHING', // should default error code to E[A-Z]+ + summary: 'Error: EBADTHING Something happened', + detail: 'Error: EBADTHING Something happened', + }, + }, + 'should output expected json output' + ) }) t.test('throw a non-error obj', (t) => { - t.plan(3) + t.plan(2) const weirdError = { code: 'ESOMETHING', message: 'foo bar', } - const _logError = npmlog.error - npmlog.error = (title, err) => { - t.equal(title, 'weird error', 'should name it a weird error') - t.same(err, weirdError, 'should log given weird error') - } - - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should exit with code 1') - } - - exitHandler(weirdError) - - t.teardown(() => { - process.exit = _exit - npmlog.error = _logError + process.once('exit', code => { + 
t.equal(code, 1, 'exits with exitCode 1') }) + exitHandler(weirdError) + t.match( + npm.log.record.find(r => r.level === 'error'), + { message: 'foo bar' } + ) }) t.test('throw a string error', (t) => { - t.plan(3) - + t.plan(2) const error = 'foo bar' - const _logError = npmlog.error - npmlog.error = (title, err) => { - t.equal(title, '', 'should have an empty name ref') - t.same(err, 'foo bar', 'should log string error') - } - - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should exit with code 1') - } - - exitHandler(error) - - t.teardown(() => { - process.exit = _exit - npmlog.error = _logError + process.once('exit', code => { + t.equal(code, 1, 'exits with exitCode 1') }) + exitHandler(error) + t.match( + npm.log.record.find(r => r.level === 'error'), + { message: 'foo bar' } + ) }) t.test('update notification', (t) => { - t.plan(2) - const updateMsg = 'you should update npm!' npm.updateNotification = updateMsg - - const _notice = npmlog.notice - npmlog.notice = (prefix, msg) => { - t.equal(prefix, '', 'should have no prefix') - t.equal(msg, updateMsg, 'should show update message') - } - - exitHandler(err) + npm.log.level = 'silent' t.teardown(() => { - npmlog.notice = _notice delete npm.updateNotification }) + + exitHandler() + t.match( + npm.log.record.find(r => r.level === 'notice'), + { message: 'you should update npm!' } + ) + t.end() }) -t.test('on exit handler', (t) => { - t.plan(2) +t.test('npm.config not ready', (t) => { + t.plan(1) - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 1, 'should default to error code 1') - } + const { npm: unloaded } = mockNpm(t) - process.once('timeEnd', (msg) => { - t.equal(msg, 'npm', 'should trigger timeEnd for npm') + t.teardown(() => { + exitHandler.setNpm(npm) }) - // skip console.error logs - const _error = console.error - console.error = () => null + exitHandler.setNpm(unloaded) - process.emit('exit', 1) - - t.teardown(() => { - console.error = _error - process.exit = _exit - }) + exitHandler() + t.match( + errors[0], + /Error: Exit prior to config file resolving./, + 'should exit with config error msg' + ) + t.end() }) -t.test('it worked', (t) => { - t.plan(2) +t.test('timing', (t) => { + npm.config.set('timing', true) - config.values.timing = false - - const _exit = process.exit - process.exit = (code) => { - process.exit = _exit - t.notOk(code, 'should exit with no code') + t.teardown(() => { + fs.unlinkSync(timingFile) + npm.config.set('timing', false) + }) - const _info = npmlog.info - npmlog.info = (msg) => { - npmlog.info = _info - t.equal(msg, 'ok', 'should log ok if "it worked"') - } + exitHandler() + const timingData = JSON.parse(fs.readFileSync(timingFile, 'utf8')) + t.match(timingData, { version: '1.0.0', 'config:load:defaults': Number }) + t.end() +}) - process.emit('exit', 0) - } +t.test('timing - with error', (t) => { + npm.config.set('timing', true) t.teardown(() => { - process.exit = _exit - config.values.timing = true + fs.unlinkSync(timingFile) + npm.config.set('timing', false) }) - exitHandler() + exitHandler(err) + const timingData = JSON.parse(fs.readFileSync(timingFile, 'utf8')) + t.match(timingData, { version: '1.0.0', 'config:load:defaults': Number }) + t.end() }) t.test('uses code from errno', (t) => { t.plan(1) - exitHandler = t.mock('../../../lib/utils/exit-handler.js', mocks) - exitHandler.setNpm(npm) - - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 127, 'should use set errno') - } - + 
process.once('exit', code => { + t.equal(code, 127, 'should set exitCode from errno') + }) exitHandler(Object.assign( new Error('Error with errno'), { errno: 127, } )) - - t.teardown(() => { - npmlog.level = 'silly' - process.exit = _exit - }) }) -t.test('uses exitCode as code if using a number', (t) => { +t.test('uses code from number', (t) => { t.plan(1) - exitHandler = t.mock('../../../lib/utils/exit-handler.js', mocks) - exitHandler.setNpm(npm) - - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - t.equal(code, 404, 'should use code if a number') - } - + process.once('exit', code => { + t.equal(code, 404, 'should set exitCode from a number') + }) exitHandler(Object.assign( new Error('Error with code type number'), { code: 404, } )) - - t.teardown(() => { - npmlog.level = 'silly' - process.exit = _exit - }) }) t.test('call exitHandler with no error', (t) => { t.plan(1) - - exitHandler = t.mock('../../../lib/utils/exit-handler.js', mocks) - exitHandler.setNpm(npm) - - const _exit = process.exit - process.exit = (code) => { - t.equal(code, undefined, 'should exit with code undefined') - } - - t.teardown(() => { - process.exit = _exit + process.once('exit', code => { + t.equal(code, 0, 'should end up with exitCode 0 (default)') }) - - exitHandler() -}) - -t.test('exit handler called twice', (t) => { - t.plan(2) - - const _verbose = npmlog.verbose - npmlog.verbose = (key, value) => { - t.equal(key, 'stack', 'should log stack in verbose level') - t.match( - value, - /Error: Exit handler called more than once./, - 'should have expected error msg' - ) - npmlog.verbose = _verbose - } - exitHandler() }) t.test('defaults to log error msg if stack is missing', (t) => { - t.plan(1) + const { npm: unloaded } = mockNpm(t) + t.teardown(() => { + exitHandler.setNpm(npm) + }) + + exitHandler.setNpm(unloaded) const noStackErr = Object.assign( new Error('Error with no stack'), { @@ -475,89 +331,63 @@ t.test('defaults to log error msg if stack is missing', (t) => { ) delete noStackErr.stack - npm.config.loaded = false - - const _error = console.error - console.error = (msg) => { - console.error = _error - npm.config.loaded = true - t.equal(msg, 'Error with no stack', 'should use error msg') - } - exitHandler(noStackErr) + t.equal(errors[0], 'Error with no stack', 'should use error msg') + t.end() }) t.test('exits cleanly when emitting exit event', (t) => { t.plan(1) - npmlog.level = 'silent' - const _exit = process.exit - process.exit = (code) => { - process.exit = _exit - t.same(code, null, 'should exit with code null') - } - - t.teardown(() => { - process.exit = _exit - npmlog.level = 'silly' - }) - + npm.log.level = 'silent' process.emit('exit') + t.match( + npm.log.record.find(r => r.level === 'info'), + { prefix: 'ok', message: '' } + ) + t.end() }) t.test('do no fancy handling for shellouts', t => { - const { exit } = process const { command } = npm - const { log } = npmlog const LOG_RECORD = [] + npm.command = 'exec' + t.teardown(() => { - npmlog.log = log - process.exit = exit npm.command = command }) - - npmlog.log = function (level, ...args) { - log.call(this, level, ...args) - LOG_RECORD.push(npmlog.record[npmlog.record.length - 1]) - } - - npm.command = 'exec' - - let EXPECT_EXIT = 0 - process.exit = code => { - t.equal(code, EXPECT_EXIT, 'got expected exit code') - EXPECT_EXIT = 0 - } t.beforeEach(() => LOG_RECORD.length = 0) - const loudNoises = () => LOG_RECORD + const loudNoises = () => npm.log.record .filter(({ level }) => ['warn', 
'error'].includes(level)) t.test('shellout with a numeric error code', t => { - EXPECT_EXIT = 5 + t.plan(2) + process.once('exit', code => { + t.equal(code, 5, 'got expected exit code') + }) exitHandler(Object.assign(new Error(), { code: 5 })) - t.equal(EXPECT_EXIT, 0, 'called process.exit') - // should log no warnings or errors, verbose/silly is fine. t.strictSame(loudNoises(), [], 'no noisy warnings') - t.end() }) t.test('shellout without a numeric error code (something in npm)', t => { - EXPECT_EXIT = 1 + t.plan(2) + process.once('exit', code => { + t.equal(code, 1, 'got expected exit code') + }) exitHandler(Object.assign(new Error(), { code: 'banana stand' })) - t.equal(EXPECT_EXIT, 0, 'called process.exit') // should log some warnings and errors, because something weird happened t.strictNotSame(loudNoises(), [], 'bring the noise') t.end() }) t.test('shellout with code=0 (extra weird?)', t => { - EXPECT_EXIT = 1 + t.plan(2) + process.once('exit', code => { + t.equal(code, 1, 'got expected exit code') + }) exitHandler(Object.assign(new Error(), { code: 0 })) - t.equal(EXPECT_EXIT, 0, 'called process.exit') - // should log some warnings and errors, because something weird happened t.strictNotSame(loudNoises(), [], 'bring the noise') - t.end() }) t.end() diff --git a/test/lib/utils/perf.js b/test/lib/utils/perf.js deleted file mode 100644 index 840dcb6e32399..0000000000000 --- a/test/lib/utils/perf.js +++ /dev/null @@ -1,38 +0,0 @@ -const t = require('tap') -const logs = [] -const npmlog = require('npmlog') -npmlog.silly = (...msg) => logs.push(['silly', ...msg]) -npmlog.timing = (...msg) => logs.push(['timing', ...msg]) - -t.test('time some stuff', t => { - const timings = {} - process.on('timing', (name, value) => { - timings[name] = (timings[name] || 0) + value - }) - require('../../../lib/utils/perf.js') - process.emit('time', 'foo') - process.emit('time', 'bar') - setTimeout(() => { - process.emit('timeEnd', 'foo') - process.emit('timeEnd', 'bar') - process.emit('time', 'foo') - setTimeout(() => { - process.emit('timeEnd', 'foo') - process.emit('timeEnd', 'baz') - t.match(logs, [ - ['timing', 'foo', /Completed in [0-9]+ms/], - ['timing', 'bar', /Completed in [0-9]+ms/], - ['timing', 'foo', /Completed in [0-9]+ms/], - [ - 'silly', - 'timing', - "Tried to end timer that doesn't exist:", - 'baz', - ], - ]) - t.match(timings, { foo: Number, bar: Number }) - t.equal(timings.foo > timings.bar, true, 'foo should be > bar') - t.end() - }, 100) - }, 100) -}) diff --git a/test/lib/utils/queryable.js b/test/lib/utils/queryable.js new file mode 100644 index 0000000000000..bde3ea66238f2 --- /dev/null +++ b/test/lib/utils/queryable.js @@ -0,0 +1,965 @@ +const { inspect } = require('util') +const t = require('tap') +const Queryable = require('../../../lib/utils/queryable.js') + +t.test('retrieve single nested property', async t => { + const fixture = { + foo: { + bar: 'bar', + baz: 'baz', + }, + lorem: { + ipsum: 'ipsum', + }, + } + const q = new Queryable(fixture) + const query = 'foo.bar' + t.strictSame(q.query(query), { [query]: 'bar' }, + 'should retrieve property value when querying for dot-sep name') +}) + +t.test('query', async t => { + const fixture = { + o: 'o', + single: [ + 'item', + ], + w: [ + 'a', + 'b', + 'c', + ], + list: [ + { + name: 'first', + }, + { + name: 'second', + }, + ], + foo: { + bar: 'bar', + baz: 'baz', + }, + lorem: { + ipsum: 'ipsum', + dolor: [ + 'a', + 'b', + 'c', + { + sit: [ + 'amet', + ], + }, + ], + }, + a: [ + [ + [ + { + b: [ + [ + { + c: 'd', + }, + ], + ], 
+ }, + ], + ], + ], + } + const q = new Queryable(fixture) + t.strictSame( + q.query(['foo.baz', 'lorem.dolor[0]']), + { + 'foo.baz': 'baz', + 'lorem.dolor[0]': 'a', + }, + 'should retrieve property values when querying for multiple dot-sep names') + t.strictSame( + q.query('lorem.dolor[3].sit[0]'), + { + 'lorem.dolor[3].sit[0]': 'amet', + }, + 'should retrieve property from nested array items') + t.strictSame( + q.query('a[0][0][0].b[0][0].c'), + { + 'a[0][0][0].b[0][0].c': 'd', + }, + 'should retrieve property from deep nested array items') + t.strictSame( + q.query('o'), + { + o: 'o', + }, + 'should retrieve single level property value') + t.strictSame( + q.query('list.name'), + { + 'list[0].name': 'first', + 'list[1].name': 'second', + }, + 'should automatically expand arrays') + t.strictSame( + q.query(['list.name']), + { + 'list[0].name': 'first', + 'list[1].name': 'second', + }, + 'should automatically expand multiple arrays') + t.strictSame( + q.query('w'), + { + w: ['a', 'b', 'c'], + }, + 'should return arrays') + t.strictSame( + q.query('single'), + { + single: 'item', + }, + 'should return single item') + t.strictSame( + q.query('missing'), + undefined, + 'should return undefined') + t.strictSame( + q.query('missing[bar]'), + undefined, + 'should return undefined also') + t.throws(() => q.query('lorem.dolor[]'), + { code: 'EINVALIDSYNTAX' }, + 'should throw if using empty brackets notation' + ) + t.throws(() => q.query('lorem.dolor[].sit[0]'), + { code: 'EINVALIDSYNTAX' }, + 'should throw if using nested empty brackets notation' + ) + + const qq = new Queryable({ + foo: { + bar: 'bar', + }, + }) + t.strictSame( + qq.query(''), + { + '': { + foo: { + bar: 'bar', + }, + }, + }, + 'should return an object with results in an empty key' + ) +}) + +t.test('missing key', async t => { + const fixture = { + foo: { + bar: 'bar', + }, + } + const q = new Queryable(fixture) + const query = 'foo.missing' + t.equal(q.query(query), undefined, + 'should retrieve no results') +}) + +t.test('no data object', async t => { + t.throws( + () => new Queryable(), + { code: 'ENOQUERYABLEOBJ' }, + 'should throw ENOQUERYABLEOBJ error' + ) + t.throws( + () => new Queryable(1), + { code: 'ENOQUERYABLEOBJ' }, + 'should throw ENOQUERYABLEOBJ error' + ) +}) + +t.test('get values', async t => { + const q = new Queryable({ + foo: { + bar: 'bar', + }, + }) + t.equal(q.get('foo.bar'), 'bar', 'should retrieve value') + t.equal(q.get('missing'), undefined, 'should return undefined') +}) + +t.test('set property values', async t => { + const fixture = { + foo: { + bar: 'bar', + }, + } + const q = new Queryable(fixture) + q.set('foo.baz', 'baz') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + }, + }, + 'should add new property and its assigned value' + ) + q.set('foo[lorem.ipsum]', 'LOREM IPSUM') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + }, + 'should be able to set square brackets props' + ) + q.set('a.b[c.d]', 'omg') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + }, + }, + }, + 'should be able to nest square brackets props' + ) + q.set('a.b[e][f.g][1.0.0]', 'multiple') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + }, + }, + }, + }, + }, + 'should be able to nest multiple square brackets props' + ) + 
q.set('a.b[e][f.g][2.0.0].author.name', 'Ruy Adorno') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + '2.0.0': { + author: { + name: 'Ruy Adorno', + }, + }, + }, + }, + }, + }, + }, + 'should be able to use dot-sep notation after square bracket props' + ) + q.set('a.b[e][f.g][2.0.0].author[url]', 'https://npmjs.com') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + '2.0.0': { + author: { + name: 'Ruy Adorno', + url: 'https://npmjs.com', + }, + }, + }, + }, + }, + }, + }, + 'should be able to have multiple, separated, square brackets props' + ) + q.set('a.b[e][f.g][2.0.0].author[foo][bar].lorem.ipsum[dolor][sit][amet].omg', 'O_O') + t.strictSame( + q.toJSON(), + { + foo: { + bar: 'bar', + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + a: { + b: { + 'c.d': 'omg', + e: { + 'f.g': { + '1.0.0': 'multiple', + '2.0.0': { + author: { + name: 'Ruy Adorno', + url: 'https://npmjs.com', + foo: { + bar: { + lorem: { + ipsum: { + dolor: { + sit: { + amet: { + omg: 'O_O', + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + 'many many times...' + ) + t.throws( + () => q.set('foo.bar.nest', 'should throw'), + { code: 'EOVERRIDEVALUE' }, + 'should throw if trying to override a literal value with an object' + ) + q.set('foo.bar.nest', 'use the force!', { force: true }) + t.strictSame( + q.toJSON().foo, + { + bar: { + nest: 'use the force!', + }, + baz: 'baz', + 'lorem.ipsum': 'LOREM IPSUM', + }, + 'should allow overriding literal values when using force option' + ) + + const qq = new Queryable({}) + qq.set('foo.bar.baz', 'BAZ') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + baz: 'BAZ', + }, + }, + }, + 'should add new props to qq object' + ) + qq.set('foo.bar.bario', 'bario') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + baz: 'BAZ', + bario: 'bario', + }, + }, + }, + 'should add new props to a previously existing object' + ) + qq.set('lorem', 'lorem') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + baz: 'BAZ', + bario: 'bario', + }, + }, + lorem: 'lorem', + }, + 'should append new props added to object later' + ) + qq.set('foo.bar[foo.bar]', 'foo.bar.with.dots') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: { + 'foo.bar': 'foo.bar.with.dots', + baz: 'BAZ', + bario: 'bario', + }, + }, + lorem: 'lorem', + }, + 'should append new props added to object later' + ) +}) + +t.test('set arrays', async t => { + const q = new Queryable({}) + + q.set('foo[1]', 'b') + t.strictSame( + q.toJSON(), + { + foo: [ + undefined, + 'b', + ], + }, + 'should be able to set items in an array using index references' + ) + + q.set('foo[0]', 'a') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + ], + }, + 'should be able to set a previously missing item to an array' + ) + + q.set('foo[2]', 'c') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + 'c', + ], + }, + 'should be able to append more items to an array' + ) + + q.set('foo[2]', 'C') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + 'C', + ], + }, + 'should be able to override array items' + ) + + t.throws( + () => q.set('foo[2].bar', 'bar'), + { code: 'EOVERRIDEVALUE' }, + 'should throw if trying to override an array literal item with an obj' + ) + + q.set('foo[2].bar', 'bar', { force: true }) + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', 
+ 'b', + { bar: 'bar' }, + ], + }, + 'should be able to override an array string item with an obj' + ) + + q.set('foo[3].foo', 'surprise surprise, another foo') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + { bar: 'bar' }, + { + foo: 'surprise surprise, another foo', + }, + ], + }, + 'should be able to append more items to an array' + ) + + q.set('foo[3].foo', 'FOO') + t.strictSame( + q.toJSON(), + { + foo: [ + 'a', + 'b', + { bar: 'bar' }, + { + foo: 'FOO', + }, + ], + }, + 'should be able to override property of an obj inside an array' + ) + + const qq = new Queryable({}) + qq.set('foo[0].bar[1].baz.bario[0][0][0]', 'something') + t.strictSame( + qq.toJSON(), + { + foo: [ + { + bar: [ + undefined, + { + baz: { + bario: [[['something']]], + }, + }, + ], + }, + ], + }, + 'should append as many arrays as necessary' + ) + qq.set('foo[0].bar[1].baz.bario[0][1][0]', 'something else') + t.strictSame( + qq.toJSON(), + { + foo: [ + { + bar: [ + undefined, + { + baz: { + bario: [[ + ['something'], + ['something else'], + ]], + }, + }, + ], + }, + ], + }, + 'should append as many arrays as necessary' + ) + qq.set('foo', null) + t.strictSame( + qq.toJSON(), + { + foo: null, + }, + 'should be able to set a value to null' + ) + qq.set('foo.bar', 'bar') + t.strictSame( + qq.toJSON(), + { + foo: { + bar: 'bar', + }, + }, + 'should be able to replace a null value with properties' + ) + + const qqq = new Queryable({ + arr: [ + 'a', + 'b', + ], + }) + + qqq.set('arr[]', 'c') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + ], + }, + 'should be able to append to array using empty bracket notation' + ) + + qqq.set('arr[].foo', 'foo') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + ], + }, + 'should be able to append objects to array using empty bracket notation' + ) + + qqq.set('arr[].bar.name', 'BAR') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + { + bar: { + name: 'BAR', + }, + }, + ], + }, + 'should be able to append more objects to array using empty brackets' + ) + + qqq.set('foo.bar.baz[].lorem.ipsum', 'something') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + { + bar: { + name: 'BAR', + }, + }, + ], + foo: { + bar: { + baz: [ + { + lorem: { + ipsum: 'something', + }, + }, + ], + }, + }, + }, + 'should be able to append to array using empty brackets in nested objs' + ) + + qqq.set('foo.bar.baz[].lorem.array[]', 'new item') + t.strictSame( + qqq.toJSON(), + { + arr: [ + 'a', + 'b', + 'c', + { + foo: 'foo', + }, + { + bar: { + name: 'BAR', + }, + }, + ], + foo: { + bar: { + baz: [ + { + lorem: { + ipsum: 'something', + }, + }, + { + lorem: { + array: [ + 'new item', + ], + }, + }, + ], + }, + }, + }, + 'should be able to append to array using empty brackets in nested objs' + ) + + const qqqq = new Queryable({ + arr: [ + 'a', + 'b', + ], + }) + t.throws( + () => qqqq.set('arr.foo', 'foo'), + { code: 'ENOADDPROP' }, + 'should throw an override error' + ) + + qqqq.set('arr.foo', 'foo', { force: true }) + t.strictSame( + qqqq.toJSON(), + { + arr: { + 0: 'a', + 1: 'b', + foo: 'foo', + }, + }, + 'should be able to override arrays with objects when using force=true' + ) + + qqqq.set('bar[]', 'item', { force: true }) + t.strictSame( + qqqq.toJSON(), + { + arr: { + 0: 'a', + 1: 'b', + foo: 'foo', + }, + bar: [ + 'item', + ], + }, + 'should be able to create new array with item when using force=true' + ) + + qqqq.set('bar[]', 'something else', { force: 
true }) + t.strictSame( + qqqq.toJSON(), + { + arr: { + 0: 'a', + 1: 'b', + foo: 'foo', + }, + bar: [ + 'item', + 'something else', + ], + }, + 'should be able to append items to arrays when using force=true' + ) + + const qqqqq = new Queryable({ + arr: [ + null, + ], + }) + qqqqq.set('arr[]', 'b') + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + null, + 'b', + ], + }, + 'should be able to append items with empty items' + ) + qqqqq.set('arr[0]', 'a') + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + }, + 'should be able to replace empty items in an array' + ) + qqqqq.set('lorem.ipsum', 3) + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + lorem: { + ipsum: 3, + }, + }, + 'should be able to replace empty items in an array' + ) + t.throws( + () => qqqqq.set('lorem[]', 4), + { code: 'ENOAPPEND' }, + 'should throw error if using empty square bracket in an non-array item' + ) + qqqqq.set('lorem[0]', 3) + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + lorem: { + 0: 3, + ipsum: 3, + }, + }, + 'should be able add indexes as props when finding an object' + ) + qqqqq.set('lorem.1', 3) + t.strictSame( + qqqqq.toJSON(), + { + arr: [ + 'a', + 'b', + ], + lorem: { + 0: 3, + 1: 3, + ipsum: 3, + }, + }, + 'should be able add numeric props to an obj' + ) +}) + +t.test('delete values', async t => { + const q = new Queryable({ + foo: { + bar: { + lorem: 'lorem', + }, + }, + }) + q.delete('foo.bar.lorem') + t.strictSame( + q.toJSON(), + { + foo: { + bar: {}, + }, + }, + 'should delete queried item' + ) + q.delete('foo') + t.strictSame( + q.toJSON(), + {}, + 'should delete nested items' + ) + q.set('foo.a.b.c[0]', 'value') + q.delete('foo.a.b.c[0]') + t.strictSame( + q.toJSON(), + { + foo: { + a: { + b: { + c: [], + }, + }, + }, + }, + 'should delete array item' + ) + // creates an array that has an implicit empty first item + q.set('foo.a.b.c[1][0].foo.bar[0][0]', 'value') + q.delete('foo.a.b.c[1]') + t.strictSame( + q.toJSON(), + { + foo: { + a: { + b: { + c: [null], + }, + }, + }, + }, + 'should delete array item' + ) +}) + +t.test('logger', async t => { + const q = new Queryable({}) + q.set('foo.bar[0].baz', 'baz') + t.strictSame( + inspect(q, { depth: 10 }), + inspect({ + foo: { + bar: [ + { + baz: 'baz', + }, + ], + }, + }, { depth: 10 }), + 'should retrieve expected data' + ) +}) + +t.test('bracket lovers', async t => { + const q = new Queryable({}) + q.set('[iLoveBrackets]', 'seriously?') + t.strictSame( + q.toJSON(), + { + '[iLoveBrackets]': 'seriously?', + }, + 'should be able to set top-level props using square brackets notation' + ) + + t.equal(q.get('[iLoveBrackets]'), 'seriously?', + 'should bypass square bracket in top-level properties') + + q.set('[0]', '-.-') + t.strictSame( + q.toJSON(), + { + '[iLoveBrackets]': 'seriously?', + '[0]': '-.-', + }, + 'any top-level item can not be parsed with square bracket notation' + ) +})