From 97f3072cebbbe7d99fefa5aaba66a12494d96a77 Mon Sep 17 00:00:00 2001 From: Robert Nagy Date: Thu, 17 Jun 2021 22:25:34 +0200 Subject: [PATCH 01/95] stream: add signal support to pipeline generators Generators in pipeline must be able to be aborted or pipeline can deadlock. PR-URL: https://github.com/nodejs/node/pull/39067 Reviewed-By: Matteo Collina Reviewed-By: Benjamin Gruenbaum Reviewed-By: James M Snell --- doc/api/stream.md | 51 +++++++++++++++++++++++---- lib/internal/streams/compose.js | 2 +- lib/internal/streams/duplexify.js | 18 +++++++--- lib/internal/streams/pipeline.js | 40 ++++++++++++++++++--- lib/stream.js | 2 +- lib/stream/promises.js | 18 ++-------- test/parallel/test-stream-pipeline.js | 19 ++++++++++ 7 files changed, 117 insertions(+), 33 deletions(-) diff --git a/doc/api/stream.md b/doc/api/stream.md index a700eb9233d77d..30660e4e0a855b 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -1886,16 +1886,14 @@ const { pipeline } = require('stream/promises'); async function run() { const ac = new AbortController(); - const options = { - signal: ac.signal, - }; + const signal = ac.signal; setTimeout(() => ac.abort(), 1); await pipeline( fs.createReadStream('archive.tar'), zlib.createGzip(), fs.createWriteStream('archive.tar.gz'), - options, + { signal }, ); } @@ -1911,10 +1909,10 @@ const fs = require('fs'); async function run() { await pipeline( fs.createReadStream('lowercase.txt'), - async function* (source) { + async function* (source, signal) { source.setEncoding('utf8'); // Work with strings rather than `Buffer`s. for await (const chunk of source) { - yield chunk.toUpperCase(); + yield await processChunk(chunk, { signal }); } }, fs.createWriteStream('uppercase.txt') @@ -1925,6 +1923,28 @@ async function run() { run().catch(console.error); ``` +Remember to handle the `signal` argument passed into the async generator. +Especially in the case where the async generator is the source for the +pipeline (i.e. first argument) or the pipeline will never complete. + +```js +const { pipeline } = require('stream/promises'); +const fs = require('fs'); + +async function run() { + await pipeline( + async function * (signal) { + await someLongRunningfn({ signal }); + yield 'asd'; + }, + fs.createWriteStream('uppercase.txt') + ); + console.log('Pipeline succeeded.'); +} + +run().catch(console.error); +``` + `stream.pipeline()` will call `stream.destroy(err)` on all streams except: * `Readable` streams which have emitted `'end'` or `'close'`. * `Writable` streams which have emitted `'finish'` or `'close'`. 
@@ -3342,13 +3362,20 @@ the `Readable.from()` utility method: ```js const { Readable } = require('stream'); +const ac = new AbortController(); +const signal = ac.signal; + async function * generate() { yield 'a'; + await someLongRunningFn({ signal }); yield 'b'; yield 'c'; } const readable = Readable.from(generate()); +readable.on('close', () => { + ac.abort(); +}); readable.on('data', (chunk) => { console.log(chunk); @@ -3368,6 +3395,11 @@ const { pipeline: pipelinePromise } = require('stream/promises'); const writable = fs.createWriteStream('./file'); +const ac = new AbortController(); +const signal = ac.signal; + +const iterator = createIterator({ signal }); + // Callback Pattern pipeline(iterator, writable, (err, value) => { if (err) { @@ -3375,6 +3407,8 @@ pipeline(iterator, writable, (err, value) => { } else { console.log(value, 'value returned'); } +}).on('close', () => { + ac.abort(); }); // Promise Pattern @@ -3382,7 +3416,10 @@ pipelinePromise(iterator, writable) .then((value) => { console.log(value, 'value returned'); }) - .catch(console.error); + .catch((err) => { + console.error(err); + ac.abort(); + }); ``` diff --git a/lib/internal/streams/compose.js b/lib/internal/streams/compose.js index 04b6c6dcb0a53e..d11a372732caab 100644 --- a/lib/internal/streams/compose.js +++ b/lib/internal/streams/compose.js @@ -1,6 +1,6 @@ 'use strict'; -const pipeline = require('internal/streams/pipeline'); +const { pipeline } = require('internal/streams/pipeline'); const Duplex = require('internal/streams/duplex'); const { destroyer } = require('internal/streams/destroy'); const { diff --git a/lib/internal/streams/duplexify.js b/lib/internal/streams/duplexify.js index 448c909fd52c01..fea0e508411c88 100644 --- a/lib/internal/streams/duplexify.js +++ b/lib/internal/streams/duplexify.js @@ -26,6 +26,7 @@ const from = require('internal/streams/from'); const { isBlob, } = require('internal/blob'); +const { AbortController } = require('internal/abort_controller'); const { FunctionPrototypeCall @@ -81,14 +82,15 @@ module.exports = function duplexify(body, name) { // } if (typeof body === 'function') { - const { value, write, final } = fromAsyncGen(body); + const { value, write, final, destroy } = fromAsyncGen(body); if (isIterable(value)) { return from(Duplexify, value, { // TODO (ronag): highWaterMark? 
objectMode: true, write, - final + final, + destroy }); } @@ -123,7 +125,8 @@ module.exports = function duplexify(body, name) { process.nextTick(cb, err); } }); - } + }, + destroy }); } @@ -202,15 +205,18 @@ module.exports = function duplexify(body, name) { function fromAsyncGen(fn) { let { promise, resolve } = createDeferredPromise(); + const ac = new AbortController(); + const signal = ac.signal; const value = fn(async function*() { while (true) { const { chunk, done, cb } = await promise; process.nextTick(cb); if (done) return; + if (signal.aborted) throw new AbortError(); yield chunk; ({ promise, resolve } = createDeferredPromise()); } - }()); + }(), { signal }); return { value, @@ -219,6 +225,10 @@ function fromAsyncGen(fn) { }, final(cb) { resolve({ done: true, cb }); + }, + destroy(err, cb) { + ac.abort(); + cb(err); } }; } diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index 3d39b3ac7b228a..012d99de0357f2 100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -21,15 +21,20 @@ const { ERR_MISSING_ARGS, ERR_STREAM_DESTROYED, }, + AbortError, } = require('internal/errors'); -const { validateCallback } = require('internal/validators'); +const { + validateCallback, + validateAbortSignal +} = require('internal/validators'); const { isIterable, isReadableNodeStream, isNodeStream, } = require('internal/streams/utils'); +const { AbortController } = require('internal/abort_controller'); let PassThrough; let Readable; @@ -168,10 +173,26 @@ function pipeline(...streams) { streams = streams[0]; } + return pipelineImpl(streams, callback); +} + +function pipelineImpl(streams, callback, opts) { if (streams.length < 2) { throw new ERR_MISSING_ARGS('streams'); } + const ac = new AbortController(); + const signal = ac.signal; + const outerSignal = opts?.signal; + + validateAbortSignal(outerSignal, 'options.signal'); + + function abort() { + finishImpl(new AbortError()); + } + + outerSignal?.addEventListener('abort', abort); + let error; let value; const destroys = []; @@ -179,8 +200,10 @@ function pipeline(...streams) { let finishCount = 0; function finish(err) { - const final = --finishCount === 0; + finishImpl(err, --finishCount === 0); + } + function finishImpl(err, final) { if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) { error = err; } @@ -193,6 +216,9 @@ function pipeline(...streams) { destroys.shift()(error); } + outerSignal?.removeEventListener('abort', abort); + ac.abort(); + if (final) { callback(error, value); } @@ -211,7 +237,7 @@ function pipeline(...streams) { if (i === 0) { if (typeof stream === 'function') { - ret = stream(); + ret = stream({ signal }); if (!isIterable(ret)) { throw new ERR_INVALID_RETURN_VALUE( 'Iterable, AsyncIterable or Stream', 'source', ret); @@ -223,7 +249,7 @@ function pipeline(...streams) { } } else if (typeof stream === 'function') { ret = makeAsyncIterable(ret); - ret = stream(ret); + ret = stream(ret, { signal }); if (reading) { if (!isIterable(ret, true)) { @@ -291,7 +317,11 @@ function pipeline(...streams) { } } + if (signal?.aborted || outerSignal?.aborted) { + process.nextTick(abort); + } + return ret; } -module.exports = pipeline; +module.exports = { pipelineImpl, pipeline }; diff --git a/lib/stream.js b/lib/stream.js index 43f59788f62bc8..cc56b76e31a4a6 100644 --- a/lib/stream.js +++ b/lib/stream.js @@ -29,8 +29,8 @@ const { promisify: { custom: customPromisify }, } = require('internal/util'); -const pipeline = require('internal/streams/pipeline'); const compose = 
require('internal/streams/compose'); +const { pipeline } = require('internal/streams/pipeline'); const { destroyer } = require('internal/streams/destroy'); const eos = require('internal/streams/end-of-stream'); const internalBuffer = require('internal/buffer'); diff --git a/lib/stream/promises.js b/lib/stream/promises.js index 8a8e66417c6057..0db01a8b208d60 100644 --- a/lib/stream/promises.js +++ b/lib/stream/promises.js @@ -5,20 +5,12 @@ const { Promise, } = primordials; -const { - addAbortSignalNoValidate, -} = require('internal/streams/add-abort-signal'); - -const { - validateAbortSignal, -} = require('internal/validators'); - const { isIterable, isNodeStream, } = require('internal/streams/utils'); -const pl = require('internal/streams/pipeline'); +const { pipelineImpl: pl } = require('internal/streams/pipeline'); const eos = require('internal/streams/end-of-stream'); function pipeline(...streams) { @@ -29,19 +21,15 @@ function pipeline(...streams) { !isNodeStream(lastArg) && !isIterable(lastArg)) { const options = ArrayPrototypePop(streams); signal = options.signal; - validateAbortSignal(signal, 'options.signal'); } - const pipe = pl(...streams, (err, value) => { + pl(streams, (err, value) => { if (err) { reject(err); } else { resolve(value); } - }); - if (signal) { - addAbortSignalNoValidate(signal, pipe); - } + }, { signal }); }); } diff --git a/test/parallel/test-stream-pipeline.js b/test/parallel/test-stream-pipeline.js index e2e5fe2e0d561a..b21e1ce52b3cb3 100644 --- a/test/parallel/test-stream-pipeline.js +++ b/test/parallel/test-stream-pipeline.js @@ -11,10 +11,12 @@ const { Duplex, addAbortSignal, } = require('stream'); +const pipelinep = require('stream/promises').pipeline; const assert = require('assert'); const http = require('http'); const { promisify } = require('util'); const net = require('net'); +const tsp = require('timers/promises'); { let finished = false; @@ -1387,3 +1389,20 @@ const net = require('net'); assert.strictEqual(res, content); })); } + +{ + const ac = new AbortController(); + const signal = ac.signal; + pipelinep( + async function * ({ signal }) { + await tsp.setTimeout(1e6, signal); + }, + async function(source) { + + }, + { signal } + ).catch(common.mustCall((err) => { + assert.strictEqual(err.name, 'AbortError'); + })); + ac.abort(); +} From 22a78a75eeb5e19ed3a70e90ed4d80e06a60fe90 Mon Sep 17 00:00:00 2001 From: Jesse Chan Date: Sun, 22 Aug 2021 02:21:23 +0800 Subject: [PATCH 02/95] build: preserves symbols during LTO with macOS linker man ld -export_dynamic: ``` Preserves all global symbols in main executables during LTO. Without this option, Link Time Optimization is allowed to inline and remove global functions. This option is used when a main executable may load a plug-in which requires certain symbols from the main executable. ``` Bug: vercel/pkg#1155 Signed-off-by: Jesse Chan PR-URL: https://github.com/nodejs/node/pull/39839 Reviewed-By: Luigi Pinca Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- node.gyp | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/node.gyp b/node.gyp index 0c11797baa2197..faf50d86bcad64 100644 --- a/node.gyp +++ b/node.gyp @@ -236,6 +236,19 @@ 'OTHER_LDFLAGS': [ '-Wl,-rpath,@loader_path', ], }, }], + [ 'enable_lto=="true"', { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + # man ld -export_dynamic: + # Preserves all global symbols in main executables during LTO. + # Without this option, Link Time Optimization is allowed to + # inline and remove global functions. 
This option is used when + # a main executable may load a plug-in which requires certain + # symbols from the main executable. + '-Wl,-export_dynamic', + ], + }, + }], ['OS=="win"', { 'libraries': [ 'Dbghelp.lib', From fb226ff2eeb55680e86d03d625507f45ace6d328 Mon Sep 17 00:00:00 2001 From: Filip Skokan Date: Sun, 29 Aug 2021 10:09:48 +0200 Subject: [PATCH 03/95] crypto: add rsa-pss keygen parameters PR-URL: https://github.com/nodejs/node/pull/39927 Reviewed-By: James M Snell --- doc/api/crypto.md | 16 ++++ doc/api/deprecations.md | 13 +++ lib/internal/crypto/keygen.js | 43 ++++++++-- .../test-crypto-keygen-deprecation.js | 51 ++++++++++++ test/parallel/test-crypto-keygen.js | 83 +++++++++++++++---- 5 files changed, 184 insertions(+), 22 deletions(-) create mode 100644 test/parallel/test-crypto-keygen-deprecation.js diff --git a/doc/api/crypto.md b/doc/api/crypto.md index 7c69a638e09a5c..db917d7eb81113 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -3375,6 +3375,10 @@ generateKey('hmac', { length: 64 }, (err, key) => { + +Type: Documentation-only (supports [`--pending-deprecation`][]) + +The `'hash'` and `'mgf1Hash'` options are replaced with `'hashAlgorithm'` +and `'mgf1HashAlgorithm'`. + [Legacy URL API]: url.md#url_legacy_url_api [NIST SP 800-38D]: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38d.pdf [RFC 6066]: https://tools.ietf.org/html/rfc6066#section-3 diff --git a/lib/internal/crypto/keygen.js b/lib/internal/crypto/keygen.js index 49a7f044cb66cd..1469a01682edda 100644 --- a/lib/internal/crypto/keygen.js +++ b/lib/internal/crypto/keygen.js @@ -60,6 +60,9 @@ const { const { isArrayBufferView } = require('internal/util/types'); +const { getOptionValue } = require('internal/options'); +const pendingDeprecation = getOptionValue('--pending-deprecation'); + function wrapKey(key, ctor) { if (typeof key === 'string' || isArrayBufferView(key) || @@ -193,21 +196,47 @@ function createJob(mode, type, options) { ...encoding); } - const { hash, mgf1Hash, saltLength } = options; - if (hash !== undefined && typeof hash !== 'string') - throw new ERR_INVALID_ARG_VALUE('options.hash', hash); - if (mgf1Hash !== undefined && typeof mgf1Hash !== 'string') - throw new ERR_INVALID_ARG_VALUE('options.mgf1Hash', mgf1Hash); + const { + hash, mgf1Hash, hashAlgorithm, mgf1HashAlgorithm, saltLength + } = options; if (saltLength !== undefined && (!isInt32(saltLength) || saltLength < 0)) throw new ERR_INVALID_ARG_VALUE('options.saltLength', saltLength); + if (hashAlgorithm !== undefined && typeof hashAlgorithm !== 'string') + throw new ERR_INVALID_ARG_VALUE('options.hashAlgorithm', hashAlgorithm); + if (mgf1HashAlgorithm !== undefined && + typeof mgf1HashAlgorithm !== 'string') + throw new ERR_INVALID_ARG_VALUE('options.mgf1HashAlgorithm', + mgf1HashAlgorithm); + if (hash !== undefined) { + pendingDeprecation && process.emitWarning( + '"options.hash" is deprecated, ' + + 'use "options.hashAlgorithm" instead.', + 'DeprecationWarning', + 'DEP0154'); + if (typeof hash !== 'string' || + (hashAlgorithm && hash !== hashAlgorithm)) { + throw new ERR_INVALID_ARG_VALUE('options.hash', hash); + } + } + if (mgf1Hash !== undefined) { + pendingDeprecation && process.emitWarning( + '"options.mgf1Hash" is deprecated, ' + + 'use "options.mgf1HashAlgorithm" instead.', + 'DeprecationWarning', + 'DEP0154'); + if (typeof mgf1Hash !== 'string' || + (mgf1HashAlgorithm && mgf1Hash !== mgf1HashAlgorithm)) { + throw new ERR_INVALID_ARG_VALUE('options.mgf1Hash', mgf1Hash); + } + } return new 
RsaKeyPairGenJob( mode, kKeyVariantRSA_PSS, modulusLength, publicExponent, - hash, - mgf1Hash, + hashAlgorithm || hash, + mgf1HashAlgorithm || mgf1Hash, saltLength, ...encoding); } diff --git a/test/parallel/test-crypto-keygen-deprecation.js b/test/parallel/test-crypto-keygen-deprecation.js new file mode 100644 index 00000000000000..318377e840b0fc --- /dev/null +++ b/test/parallel/test-crypto-keygen-deprecation.js @@ -0,0 +1,51 @@ +// Flags: --pending-deprecation + +'use strict'; + +const common = require('../common'); +if (!common.hasCrypto) + common.skip('missing crypto'); + +const DeprecationWarning = []; +DeprecationWarning.push([ + '"options.hash" is deprecated, use "options.hashAlgorithm" instead.', + 'DEP0154']); +DeprecationWarning.push([ + '"options.mgf1Hash" is deprecated, use "options.mgf1HashAlgorithm" instead.', + 'DEP0154']); + +common.expectWarning({ DeprecationWarning }); + +const assert = require('assert'); +const { generateKeyPair } = require('crypto'); + +{ + // This test makes sure deprecated options still work as intended + + generateKeyPair('rsa-pss', { + modulusLength: 512, + saltLength: 16, + hash: 'sha256', + mgf1Hash: 'sha256' + }, common.mustSucceed((publicKey, privateKey) => { + assert.strictEqual(publicKey.type, 'public'); + assert.strictEqual(publicKey.asymmetricKeyType, 'rsa-pss'); + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n, + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256', + saltLength: 16 + }); + + assert.strictEqual(privateKey.type, 'private'); + assert.strictEqual(privateKey.asymmetricKeyType, 'rsa-pss'); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n, + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256', + saltLength: 16 + }); + })); +} diff --git a/test/parallel/test-crypto-keygen.js b/test/parallel/test-crypto-keygen.js index 09d43317426e71..d35eeae5b98ed5 100644 --- a/test/parallel/test-crypto-keygen.js +++ b/test/parallel/test-crypto-keygen.js @@ -302,8 +302,8 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); generateKeyPair('rsa-pss', { modulusLength: 512, saltLength: 16, - hash: 'sha256', - mgf1Hash: 'sha256' + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256' }, common.mustSucceed((publicKey, privateKey) => { assert.strictEqual(publicKey.type, 'public'); assert.strictEqual(publicKey.asymmetricKeyType, 'rsa-pss'); @@ -1324,12 +1324,12 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); assert.throws(() => { generateKeyPairSync('rsa-pss', { modulusLength: 4096, - hash: hashValue + hashAlgorithm: hashValue }); }, { name: 'TypeError', code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.hash' is invalid. " + + message: "The property 'options.hashAlgorithm' is invalid. 
" + `Received ${inspect(hashValue)}` }); } @@ -1339,8 +1339,8 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); generateKeyPair('rsa-pss', { modulusLength: 512, saltLength: 2147483648, - hash: 'sha256', - mgf1Hash: 'sha256' + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256' }, common.mustNotCall()); }, { name: 'TypeError', @@ -1353,8 +1353,8 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); generateKeyPair('rsa-pss', { modulusLength: 512, saltLength: -1, - hash: 'sha256', - mgf1Hash: 'sha256' + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256' }, common.mustNotCall()); }, { name: 'TypeError', @@ -1451,8 +1451,8 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); generateKeyPair('rsa-pss', { modulusLength: 512, saltLength: 16, - hash: 'sha256', - mgf1Hash: undefined + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: undefined }); }, { @@ -1462,21 +1462,21 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); } ); - for (const mgf1Hash of [null, 0, false, {}, []]) { + for (const mgf1HashAlgorithm of [null, 0, false, {}, []]) { assert.throws( () => { generateKeyPair('rsa-pss', { modulusLength: 512, saltLength: 16, - hash: 'sha256', - mgf1Hash + hashAlgorithm: 'sha256', + mgf1HashAlgorithm }, common.mustNotCall()); }, { name: 'TypeError', code: 'ERR_INVALID_ARG_VALUE', - message: "The property 'options.mgf1Hash' is invalid. " + - `Received ${inspect(mgf1Hash)}` + message: "The property 'options.mgf1HashAlgorithm' is invalid. " + + `Received ${inspect(mgf1HashAlgorithm)}` } ); @@ -1568,3 +1568,56 @@ if (!common.hasOpenSSL3) { } } } + +{ + // This test makes sure deprecated and new options may be used + // simultaneously so long as they're identical values. + + generateKeyPair('rsa-pss', { + modulusLength: 512, + saltLength: 16, + hash: 'sha256', + hashAlgorithm: 'sha256', + mgf1Hash: 'sha256', + mgf1HashAlgorithm: 'sha256' + }, common.mustSucceed((publicKey, privateKey) => { + assert.strictEqual(publicKey.type, 'public'); + assert.strictEqual(publicKey.asymmetricKeyType, 'rsa-pss'); + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n, + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256', + saltLength: 16 + }); + + assert.strictEqual(privateKey.type, 'private'); + assert.strictEqual(privateKey.asymmetricKeyType, 'rsa-pss'); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, { + modulusLength: 512, + publicExponent: 65537n, + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256', + saltLength: 16 + }); + })); +} + +{ + // This test makes sure deprecated and new options must + // be the same value. 
+ + assert.throws(() => generateKeyPair('rsa-pss', { + modulusLength: 512, + saltLength: 16, + mgf1Hash: 'sha256', + mgf1HashAlgorithm: 'sha1' + }, common.mustNotCall()), { code: 'ERR_INVALID_ARG_VALUE' }); + + assert.throws(() => generateKeyPair('rsa-pss', { + modulusLength: 512, + saltLength: 16, + hash: 'sha256', + hashAlgorithm: 'sha1' + }, common.mustNotCall()), { code: 'ERR_INVALID_ARG_VALUE' }); +} From 381293f54aa1e9f26e4c591a7c689f46b9ef507a Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Tue, 31 Aug 2021 13:09:20 +0000 Subject: [PATCH 04/95] meta: update AUTHORS MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39957 Reviewed-By: Rich Trott Reviewed-By: Michaël Zasso Reviewed-By: James M Snell --- AUTHORS | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/AUTHORS b/AUTHORS index b9ad7e86f12f5c..f9259c152d0669 100644 --- a/AUTHORS +++ b/AUTHORS @@ -3309,5 +3309,10 @@ Mestery Himadri Ganguly Howie Zhao Luan Devecchi +ashish maurya +Justin +Raz Luvaton +Don Jayamanne +Felix Yan # Generated by tools/update-authors.js From bef78a2f884adc02e1cc07c280b9f484cabcb0f4 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:11:01 +0800 Subject: [PATCH 05/95] src: register external references of dtrace for snapshot PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/node_dtrace.cc | 31 ++++++++++++++++++++++--------- src/node_external_reference.h | 9 ++++++++- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/src/node_dtrace.cc b/src/node_dtrace.cc index 3c407f3447f171..7808a649a4a088 100644 --- a/src/node_dtrace.cc +++ b/src/node_dtrace.cc @@ -44,6 +44,7 @@ #include "env-inl.h" #include "node_errors.h" +#include "node_external_reference.h" #include @@ -288,6 +289,14 @@ void InitDTrace(Environment* env) { }, env); } +#define NODE_PROBES(V) \ + V(DTRACE_NET_SERVER_CONNECTION) \ + V(DTRACE_NET_STREAM_END) \ + V(DTRACE_HTTP_SERVER_REQUEST) \ + V(DTRACE_HTTP_SERVER_RESPONSE) \ + V(DTRACE_HTTP_CLIENT_REQUEST) \ + V(DTRACE_HTTP_CLIENT_RESPONSE) + void InitializeDTrace(Local target, Local unused, Local context, @@ -295,16 +304,20 @@ void InitializeDTrace(Local target, Environment* env = Environment::GetCurrent(context); #if defined HAVE_DTRACE || defined HAVE_ETW -# define NODE_PROBE(name) env->SetMethod(target, #name, name); - NODE_PROBE(DTRACE_NET_SERVER_CONNECTION) - NODE_PROBE(DTRACE_NET_STREAM_END) - NODE_PROBE(DTRACE_HTTP_SERVER_REQUEST) - NODE_PROBE(DTRACE_HTTP_SERVER_RESPONSE) - NODE_PROBE(DTRACE_HTTP_CLIENT_REQUEST) - NODE_PROBE(DTRACE_HTTP_CLIENT_RESPONSE) -# undef NODE_PROBE -#endif +#define V(name) env->SetMethod(target, #name, name); + NODE_PROBES(V) +#undef V +#endif // defined HAVE_DTRACE || defined HAVE_ETW +} + +void RegisterDtraceExternalReferences(ExternalReferenceRegistry* registry) { +#if defined HAVE_DTRACE || defined HAVE_ETW +#define V(name) registry->Register(name); + NODE_PROBES(V) +#undef V +#endif // defined HAVE_DTRACE || defined HAVE_ETW } } // namespace node NODE_MODULE_CONTEXT_AWARE_INTERNAL(dtrace, node::InitializeDTrace) +NODE_MODULE_EXTERNAL_REFERENCE(dtrace, node::RegisterDtraceExternalReferences) diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 8d2de64ad43445..280f1269d9bc27 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -92,10 +92,17 @@ class ExternalReferenceRegistry { #define 
EXTERNAL_REFERENCE_BINDING_LIST_INSPECTOR(V) #endif // HAVE_INSPECTOR +#if HAVE_DTRACE || HAVE_ETW +#define EXTERNAL_REFERENCE_BINDING_LIST_DTRACE(V) V(dtrace) +#else +#define EXTERNAL_REFERENCE_BINDING_LIST_DTRACE(V) +#endif + #define EXTERNAL_REFERENCE_BINDING_LIST(V) \ EXTERNAL_REFERENCE_BINDING_LIST_BASE(V) \ EXTERNAL_REFERENCE_BINDING_LIST_INSPECTOR(V) \ - EXTERNAL_REFERENCE_BINDING_LIST_I18N(V) + EXTERNAL_REFERENCE_BINDING_LIST_I18N(V) \ + EXTERNAL_REFERENCE_BINDING_LIST_DTRACE(V) } // namespace node From 6fdf02523ee151966155e2681e8762ce764d9925 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:11:42 +0800 Subject: [PATCH 06/95] src: register external references of node-report for snapshot PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/node_external_reference.h | 1 + src/node_report_module.cc | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 280f1269d9bc27..37f06e9d30b577 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -65,6 +65,7 @@ class ExternalReferenceRegistry { V(performance) \ V(process_methods) \ V(process_object) \ + V(report) \ V(task_queue) \ V(url) \ V(util) \ diff --git a/src/node_report_module.cc b/src/node_report_module.cc index 190755a85b2369..160498c6276a3f 100644 --- a/src/node_report_module.cc +++ b/src/node_report_module.cc @@ -1,5 +1,6 @@ #include "env.h" #include "node_errors.h" +#include "node_external_reference.h" #include "node_internals.h" #include "node_options.h" #include "node_report.h" @@ -196,6 +197,26 @@ static void Initialize(Local exports, SetReportOnUncaughtException); } +void RegisterExternalReferences(node::ExternalReferenceRegistry* registry) { + registry->Register(WriteReport); + registry->Register(GetReport); + registry->Register(GetCompact); + registry->Register(SetCompact); + registry->Register(GetDirectory); + registry->Register(SetDirectory); + registry->Register(GetFilename); + registry->Register(SetFilename); + registry->Register(GetSignal); + registry->Register(SetSignal); + registry->Register(ShouldReportOnFatalError); + registry->Register(SetReportOnFatalError); + registry->Register(ShouldReportOnSignal); + registry->Register(SetReportOnSignal); + registry->Register(ShouldReportOnUncaughtException); + registry->Register(SetReportOnUncaughtException); +} + } // namespace report NODE_MODULE_CONTEXT_AWARE_INTERNAL(report, report::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(report, report::RegisterExternalReferences) From a2c1c3ef64d20a992fc4ce9cefb02474d4bd4ea0 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:17:06 +0800 Subject: [PATCH 07/95] src: register external references of BaseObject for snapshot PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/base_object-inl.h | 16 +++++++++------- src/base_object.h | 2 ++ src/node_external_reference.cc | 3 +++ 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/src/base_object-inl.h b/src/base_object-inl.h index ad900b6399f149..bb1e8d4b46bce3 100644 --- a/src/base_object-inl.h +++ b/src/base_object-inl.h @@ -148,15 +148,17 @@ bool BaseObject::IsWeakOrDetached() const { return pd->wants_weak_jsobj || pd->is_detached; } +void BaseObject::LazilyInitializedJSTemplateConstructor( + const v8::FunctionCallbackInfo& args) { + 
DCHECK(args.IsConstructCall()); + DCHECK_GT(args.This()->InternalFieldCount(), 0); + args.This()->SetAlignedPointerInInternalField(BaseObject::kSlot, nullptr); +} + v8::Local BaseObject::MakeLazilyInitializedJSTemplate(Environment* env) { - auto constructor = [](const v8::FunctionCallbackInfo& args) { - DCHECK(args.IsConstructCall()); - DCHECK_GT(args.This()->InternalFieldCount(), 0); - args.This()->SetAlignedPointerInInternalField(BaseObject::kSlot, nullptr); - }; - - v8::Local t = env->NewFunctionTemplate(constructor); + v8::Local t = + env->NewFunctionTemplate(LazilyInitializedJSTemplateConstructor); t->Inherit(BaseObject::GetConstructorTemplate(env)); t->InstanceTemplate()->SetInternalFieldCount( BaseObject::kInternalFieldCount); diff --git a/src/base_object.h b/src/base_object.h index ec9d4a69d537b2..d46a0f216009c6 100644 --- a/src/base_object.h +++ b/src/base_object.h @@ -65,6 +65,8 @@ class BaseObject : public MemoryRetainer { // was also passed to the `BaseObject()` constructor initially. // This may return `nullptr` if the C++ object has not been constructed yet, // e.g. when the JS object used `MakeLazilyInitializedJSTemplate`. + static inline void LazilyInitializedJSTemplateConstructor( + const v8::FunctionCallbackInfo& args); static inline BaseObject* FromJSObject(v8::Local object); template static inline T* FromJSObject(v8::Local object); diff --git a/src/node_external_reference.cc b/src/node_external_reference.cc index 73e1489865d3a4..94198719b6a002 100644 --- a/src/node_external_reference.cc +++ b/src/node_external_reference.cc @@ -1,6 +1,7 @@ #include "node_external_reference.h" #include #include +#include "base_object-inl.h" #include "util.h" namespace node { @@ -13,6 +14,8 @@ const std::vector& ExternalReferenceRegistry::external_references() { } ExternalReferenceRegistry::ExternalReferenceRegistry() { + this->Register(BaseObject::LazilyInitializedJSTemplateConstructor); + #define V(modname) _register_external_reference_##modname(this); EXTERNAL_REFERENCE_BINDING_LIST(V) #undef V From b4e074c29516e1c9cb4c97dc659d45f91876c339 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:18:34 +0800 Subject: [PATCH 08/95] src: register missing stream wrap external references PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/stream_wrap.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/stream_wrap.cc b/src/stream_wrap.cc index 78d20f912b4cdb..2275167ad0eeb6 100644 --- a/src/stream_wrap.cc +++ b/src/stream_wrap.cc @@ -106,6 +106,10 @@ void LibuvStreamWrap::Initialize(Local target, void LibuvStreamWrap::RegisterExternalReferences( ExternalReferenceRegistry* registry) { registry->Register(IsConstructCallCallback); + registry->Register(GetWriteQueueSize); + registry->Register(SetBlocking); + // TODO(joyee): StreamBase::RegisterExternalReferences() is called somewhere + // else but we may want to do it here too and guard it with a static flag. 
} LibuvStreamWrap::LibuvStreamWrap(Environment* env, From db75711c5c8db3464b45c8176b4bea1a54fd3c63 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:18:59 +0800 Subject: [PATCH 09/95] src: register missing process methods external references PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/node_process_object.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/node_process_object.cc b/src/node_process_object.cc index 587b4ecb936835..29f6569a45e5b2 100644 --- a/src/node_process_object.cc +++ b/src/node_process_object.cc @@ -211,6 +211,11 @@ void PatchProcessObject(const FunctionCallbackInfo& args) { void RegisterProcessExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(RawDebug); + registry->Register(GetParentProcessId); + registry->Register(DebugPortSetter); + registry->Register(DebugPortGetter); + registry->Register(ProcessTitleSetter); + registry->Register(ProcessTitleGetter); } } // namespace node From 6095fb07b6f8ffd46cc4ae34d6d423bd5606e817 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:20:04 +0800 Subject: [PATCH 10/95] src: register external references of SignalWrap for snapshot PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/node_external_reference.h | 1 + src/signal_wrap.cc | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 37f06e9d30b577..516a661b61c152 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -72,6 +72,7 @@ class ExternalReferenceRegistry { V(serdes) \ V(string_decoder) \ V(stream_wrap) \ + V(signal_wrap) \ V(trace_events) \ V(timers) \ V(types) \ diff --git a/src/signal_wrap.cc b/src/signal_wrap.cc index e8a1500d2e9961..df7f94eeec8451 100644 --- a/src/signal_wrap.cc +++ b/src/signal_wrap.cc @@ -22,6 +22,7 @@ #include "async_wrap-inl.h" #include "env-inl.h" #include "handle_wrap.h" +#include "node_external_reference.h" #include "node_process-inl.h" #include "util-inl.h" #include "v8.h" @@ -62,6 +63,12 @@ class SignalWrap : public HandleWrap { env->SetConstructorFunction(target, "Signal", constructor); } + static void RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(New); + registry->Register(Start); + registry->Register(Stop); + } + SET_NO_MEMORY_INFO() SET_MEMORY_INFO_NAME(SignalWrap) SET_SELF_SIZE(SignalWrap) @@ -167,3 +174,5 @@ bool HasSignalJSHandler(int signum) { NODE_MODULE_CONTEXT_AWARE_INTERNAL(signal_wrap, node::SignalWrap::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(signal_wrap, + node::SignalWrap::RegisterExternalReferences) From 00cca4808186e53a9cafe7ddb75eac7d28afaf07 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:20:36 +0800 Subject: [PATCH 11/95] src: register external references of TCPWrap for snapshot PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/node_external_reference.h | 1 + src/tcp_wrap.cc | 22 +++++++++++++++++++++- src/tcp_wrap.h | 2 ++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 516a661b61c152..90035ae6966709 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -67,6 +67,7 @@ class ExternalReferenceRegistry { 
V(process_object) \ V(report) \ V(task_queue) \ + V(tcp_wrap) \ V(url) \ V(util) \ V(serdes) \ diff --git a/src/tcp_wrap.cc b/src/tcp_wrap.cc index cd7174984e2e36..84b18a1592d93c 100644 --- a/src/tcp_wrap.cc +++ b/src/tcp_wrap.cc @@ -21,12 +21,13 @@ #include "tcp_wrap.h" +#include "connect_wrap.h" #include "connection_wrap.h" #include "env-inl.h" #include "handle_wrap.h" #include "node_buffer.h" +#include "node_external_reference.h" #include "node_internals.h" -#include "connect_wrap.h" #include "stream_base-inl.h" #include "stream_wrap.h" #include "util-inl.h" @@ -120,6 +121,23 @@ void TCPWrap::Initialize(Local target, constants).Check(); } +void TCPWrap::RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(New); + registry->Register(Open); + registry->Register(Bind); + registry->Register(Listen); + registry->Register(Connect); + registry->Register(Bind6); + registry->Register(Connect6); + + registry->Register(GetSockOrPeerName); + registry->Register(GetSockOrPeerName); + registry->Register(SetNoDelay); + registry->Register(SetKeepAlive); +#ifdef _WIN32 + registry->Register(SetSimultaneousAccepts); +#endif +} void TCPWrap::New(const FunctionCallbackInfo& args) { // This constructor should not be exposed to public javascript. @@ -393,3 +411,5 @@ Local AddressToJS(Environment* env, } // namespace node NODE_MODULE_CONTEXT_AWARE_INTERNAL(tcp_wrap, node::TCPWrap::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(tcp_wrap, + node::TCPWrap::RegisterExternalReferences) diff --git a/src/tcp_wrap.h b/src/tcp_wrap.h index 0099eedb4bc629..3abf4ded19fd7c 100644 --- a/src/tcp_wrap.h +++ b/src/tcp_wrap.h @@ -29,6 +29,7 @@ namespace node { +class ExternalReferenceRegistry; class Environment; class TCPWrap : public ConnectionWrap { @@ -45,6 +46,7 @@ class TCPWrap : public ConnectionWrap { v8::Local unused, v8::Local context, void* priv); + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); SET_NO_MEMORY_INFO() SET_SELF_SIZE(TCPWrap) From 7a17cbfdeab03cad09e620a9cffaa4a3f7252f37 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:21:02 +0800 Subject: [PATCH 12/95] src: register external references of TTYWrap for snapshot PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/node_external_reference.h | 1 + src/tty_wrap.cc | 10 ++++++++++ src/tty_wrap.h | 2 ++ 3 files changed, 13 insertions(+) diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 90035ae6966709..c336c62e77b5ee 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -68,6 +68,7 @@ class ExternalReferenceRegistry { V(report) \ V(task_queue) \ V(tcp_wrap) \ + V(tty_wrap) \ V(url) \ V(util) \ V(serdes) \ diff --git a/src/tty_wrap.cc b/src/tty_wrap.cc index 401c2513dbc628..1c749a9741c312 100644 --- a/src/tty_wrap.cc +++ b/src/tty_wrap.cc @@ -24,6 +24,7 @@ #include "env-inl.h" #include "handle_wrap.h" #include "node_buffer.h" +#include "node_external_reference.h" #include "stream_base-inl.h" #include "stream_wrap.h" #include "util-inl.h" @@ -40,6 +41,13 @@ using v8::Object; using v8::String; using v8::Value; +void TTYWrap::RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(New); + registry->Register(GetWindowSize); + registry->Register(SetRawMode); + registry->Register(IsTTY); +} + void TTYWrap::Initialize(Local target, Local unused, Local context, @@ -147,3 +155,5 @@ TTYWrap::TTYWrap(Environment* env, 
} // namespace node NODE_MODULE_CONTEXT_AWARE_INTERNAL(tty_wrap, node::TTYWrap::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(tty_wrap, + node::TTYWrap::RegisterExternalReferences) diff --git a/src/tty_wrap.h b/src/tty_wrap.h index fdf07e4242c1f8..5a7863cb585b1c 100644 --- a/src/tty_wrap.h +++ b/src/tty_wrap.h @@ -30,6 +30,7 @@ namespace node { class Environment; +class ExternalReferenceRegistry; class TTYWrap : public LibuvStreamWrap { public: @@ -37,6 +38,7 @@ class TTYWrap : public LibuvStreamWrap { v8::Local unused, v8::Local context, void* priv); + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); SET_NO_MEMORY_INFO() SET_MEMORY_INFO_NAME(TTYWrap) From 826eee363cd66b91298bbecb45650a1a50bd8ac6 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 31 Aug 2021 23:21:25 +0800 Subject: [PATCH 13/95] src: register external references of PipeWrap for snapshot PR-URL: https://github.com/nodejs/node/pull/39961 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- src/node_external_reference.h | 1 + src/pipe_wrap.cc | 16 +++++++++++++++- src/pipe_wrap.h | 2 ++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/node_external_reference.h b/src/node_external_reference.h index c336c62e77b5ee..4ebaa8d27feae8 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -71,6 +71,7 @@ class ExternalReferenceRegistry { V(tty_wrap) \ V(url) \ V(util) \ + V(pipe_wrap) \ V(serdes) \ V(string_decoder) \ V(stream_wrap) \ diff --git a/src/pipe_wrap.cc b/src/pipe_wrap.cc index 7ec3c66a78bb95..da52f5cee01062 100644 --- a/src/pipe_wrap.cc +++ b/src/pipe_wrap.cc @@ -22,12 +22,13 @@ #include "pipe_wrap.h" #include "async_wrap.h" +#include "connect_wrap.h" #include "connection_wrap.h" #include "env-inl.h" #include "handle_wrap.h" #include "node.h" #include "node_buffer.h" -#include "connect_wrap.h" +#include "node_external_reference.h" #include "stream_base-inl.h" #include "stream_wrap.h" #include "util-inl.h" @@ -104,6 +105,17 @@ void PipeWrap::Initialize(Local target, constants).Check(); } +void PipeWrap::RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(New); + registry->Register(Bind); + registry->Register(Listen); + registry->Register(Connect); + registry->Register(Open); +#ifdef _WIN32 + registry->Register(SetPendingInstances); +#endif + registry->Register(Fchmod); +} void PipeWrap::New(const FunctionCallbackInfo& args) { // This constructor should not be exposed to public javascript. 
@@ -236,3 +248,5 @@ void PipeWrap::Connect(const FunctionCallbackInfo& args) { } // namespace node NODE_MODULE_CONTEXT_AWARE_INTERNAL(pipe_wrap, node::PipeWrap::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(pipe_wrap, + node::PipeWrap::RegisterExternalReferences) diff --git a/src/pipe_wrap.h b/src/pipe_wrap.h index 53008b0d165044..c0722b63d85372 100644 --- a/src/pipe_wrap.h +++ b/src/pipe_wrap.h @@ -29,6 +29,7 @@ namespace node { +class ExternalReferenceRegistry; class Environment; class PipeWrap : public ConnectionWrap { @@ -47,6 +48,7 @@ class PipeWrap : public ConnectionWrap { v8::Local context, void* priv); + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); SET_NO_MEMORY_INFO() SET_MEMORY_INFO_NAME(PipeWrap) SET_SELF_SIZE(PipeWrap) From b6dd2ea930ccaae9cfceecfbd9efc98cd1f22790 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Wed, 1 Sep 2021 19:21:41 +0200 Subject: [PATCH 14/95] doc: fix missing history version in `fs.md` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/nodejs/node/pull/33716 Refs: https://github.com/nodejs/node/pull/35993 Refs: https://github.com/nodejs/node/pull/35911 PR-URL: https://github.com/nodejs/node/pull/39972 Reviewed-By: Michaël Zasso Reviewed-By: Luigi Pinca Reviewed-By: Juan José Arboleda Reviewed-By: James M Snell Reviewed-By: Darshan Sen Reviewed-By: Tobias Nießen --- doc/api/fs.md | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index b704d87ea94e48..0cb93871b9924e 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -983,7 +983,9 @@ try { -### `performanceEntry.details` +### `performanceEntry.detail` @@ -382,7 +382,7 @@ Performance Entry. ### Garbage Collection ('gc') Details -When `performanceEntry.type` is equal to `'gc'`, the `performanceEntry.details` +When `performanceEntry.type` is equal to `'gc'`, the `performanceEntry.detail` property will be an {Object} with two properties: * `kind` {number} One of: @@ -402,10 +402,10 @@ property will be an {Object} with two properties: ### HTTP/2 ('http2') Details When `performanceEntry.type` is equal to `'http2'`, the -`performanceEntry.details` property will be an {Object} containing +`performanceEntry.detail` property will be an {Object} containing additional performance information. -If `performanceEntry.name` is equal to `Http2Stream`, the `details` +If `performanceEntry.name` is equal to `Http2Stream`, the `detail` will contain the following properties: * `bytesRead` {number} The number of `DATA` frame bytes received for this @@ -420,7 +420,7 @@ will contain the following properties: * `timeToFirstHeader` {number} The number of milliseconds elapsed between the `PerformanceEntry` `startTime` and the reception of the first header. -If `performanceEntry.name` is equal to `Http2Session`, the `details` will +If `performanceEntry.name` is equal to `Http2Session`, the `detail` will contain the following properties: * `bytesRead` {number} The number of bytes received for this `Http2Session`. @@ -443,7 +443,7 @@ contain the following properties: ### Timerify ('function') Details When `performanceEntry.type` is equal to `'function'`, the -`performanceEntry.details` property will be an {Array} listing +`performanceEntry.detail` property will be an {Array} listing the input arguments to the timed function. 
## Class: `PerformanceNodeTiming` From edcfffeaea24839f0cf3d81e055f123be6c2c2ef Mon Sep 17 00:00:00 2001 From: null <60427892+vierofernando@users.noreply.github.com> Date: Fri, 3 Sep 2021 07:48:58 +0700 Subject: [PATCH 30/95] lib: use standard property names MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The standard property names that aren't strings can be used where appropiate, this is one of them. PR-URL: https://github.com/nodejs/node/pull/39981 Reviewed-By: James M Snell Reviewed-By: Gerhard Stöbich Reviewed-By: Luigi Pinca Reviewed-By: Darshan Sen Reviewed-By: Tobias Nießen Reviewed-By: Michael Dawson --- lib/v8.js | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/lib/v8.js b/lib/v8.js index b4e2d5cd1a751a..a7ae0cabbb3cd9 100644 --- a/lib/v8.js +++ b/lib/v8.js @@ -154,17 +154,17 @@ function getHeapStatistics() { updateHeapStatisticsBuffer(); return { - 'total_heap_size': buffer[kTotalHeapSizeIndex], - 'total_heap_size_executable': buffer[kTotalHeapSizeExecutableIndex], - 'total_physical_size': buffer[kTotalPhysicalSizeIndex], - 'total_available_size': buffer[kTotalAvailableSize], - 'used_heap_size': buffer[kUsedHeapSizeIndex], - 'heap_size_limit': buffer[kHeapSizeLimitIndex], - 'malloced_memory': buffer[kMallocedMemoryIndex], - 'peak_malloced_memory': buffer[kPeakMallocedMemoryIndex], - 'does_zap_garbage': buffer[kDoesZapGarbageIndex], - 'number_of_native_contexts': buffer[kNumberOfNativeContextsIndex], - 'number_of_detached_contexts': buffer[kNumberOfDetachedContextsIndex] + total_heap_size: buffer[kTotalHeapSizeIndex], + total_heap_size_executable: buffer[kTotalHeapSizeExecutableIndex], + total_physical_size: buffer[kTotalPhysicalSizeIndex], + total_available_size: buffer[kTotalAvailableSize], + used_heap_size: buffer[kUsedHeapSizeIndex], + heap_size_limit: buffer[kHeapSizeLimitIndex], + malloced_memory: buffer[kMallocedMemoryIndex], + peak_malloced_memory: buffer[kPeakMallocedMemoryIndex], + does_zap_garbage: buffer[kDoesZapGarbageIndex], + number_of_native_contexts: buffer[kNumberOfNativeContextsIndex], + number_of_detached_contexts: buffer[kNumberOfDetachedContextsIndex] }; } @@ -209,9 +209,9 @@ function getHeapCodeStatistics() { updateHeapCodeStatisticsBuffer(); return { - 'code_and_metadata_size': buffer[kCodeAndMetadataSizeIndex], - 'bytecode_and_metadata_size': buffer[kBytecodeAndMetadataSizeIndex], - 'external_script_source_size': buffer[kExternalScriptSourceSizeIndex] + code_and_metadata_size: buffer[kCodeAndMetadataSizeIndex], + bytecode_and_metadata_size: buffer[kBytecodeAndMetadataSizeIndex], + external_script_source_size: buffer[kExternalScriptSourceSizeIndex] }; } From d6124d8259d5fbbd1c7dff6da893373bf3e46665 Mon Sep 17 00:00:00 2001 From: Mestery Date: Sun, 29 Aug 2021 16:03:36 +0200 Subject: [PATCH 31/95] repl: fix top level await with surrogate characters Fixes: https://github.com/nodejs/node/issues/39929 PR-URL: https://github.com/nodejs/node/pull/39931 Reviewed-By: Ruben Bridgewater Reviewed-By: James M Snell Reviewed-By: Shingo Inoue --- lib/internal/repl/await.js | 3 +-- .../test-repl-preprocess-top-level-await.js | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/lib/internal/repl/await.js b/lib/internal/repl/await.js index f28a7ea412bc3f..ba138fdfe8c8d9 100644 --- a/lib/internal/repl/await.js +++ b/lib/internal/repl/await.js @@ -1,7 +1,6 @@ 'use strict'; const { - ArrayFrom, ArrayPrototypeForEach, ArrayPrototypeIncludes, 
ArrayPrototypeJoin, @@ -155,7 +154,7 @@ for (const nodeType of ObjectKeys(walk.base)) { function processTopLevelAwait(src) { const wrapPrefix = '(async () => { '; const wrapped = `${wrapPrefix}${src} })()`; - const wrappedArray = ArrayFrom(wrapped); + const wrappedArray = StringPrototypeSplit(wrapped, ''); let root; try { root = parser.parse(wrapped, { ecmaVersion: 'latest' }); diff --git a/test/parallel/test-repl-preprocess-top-level-await.js b/test/parallel/test-repl-preprocess-top-level-await.js index 93d3d79a87bb08..656b616b71d9e6 100644 --- a/test/parallel/test-repl-preprocess-top-level-await.js +++ b/test/parallel/test-repl-preprocess-top-level-await.js @@ -9,17 +9,31 @@ const { processTopLevelAwait } = require('internal/repl/await'); // This test was created based on // https://cs.chromium.org/chromium/src/third_party/WebKit/LayoutTests/http/tests/inspector-unit/preprocess-top-level-awaits.js?rcl=358caaba5e763e71c4abb9ada2d9cd8b1188cac9 +const surrogate = ( + '"\u{1F601}\u{1f468}\u200d\u{1f469}\u200d\u{1f467}\u200d\u{1f466}"' +); + const testCases = [ [ '0', null ], [ 'await 0', '(async () => { return (await 0) })()' ], + [ `await ${surrogate}`, + `(async () => { return (await ${surrogate}) })()` ], [ 'await 0;', '(async () => { return (await 0); })()' ], + [ `await ${surrogate};`, + `(async () => { return (await ${surrogate}); })()` ], + [ `await ${surrogate};`, + `(async () => { return (await ${surrogate}); })()` ], [ '(await 0)', '(async () => { return ((await 0)) })()' ], + [ `(await ${surrogate})`, + `(async () => { return ((await ${surrogate})) })()` ], [ '(await 0);', '(async () => { return ((await 0)); })()' ], + [ `(await ${surrogate});`, + `(async () => { return ((await ${surrogate})); })()` ], [ 'async function foo() { await 0; }', null ], [ 'async () => await 0', @@ -28,8 +42,12 @@ const testCases = [ null ], [ 'await 0; return 0;', null ], + [ `await ${surrogate}; await ${surrogate};`, + `(async () => { await ${surrogate}; return (await ${surrogate}); })()` ], [ 'var a = await 1', 'var a; (async () => { void (a = await 1) })()' ], + [ `var a = await ${surrogate}`, + `var a; (async () => { void (a = await ${surrogate}) })()` ], [ 'let a = await 1', 'let a; (async () => { void (a = await 1) })()' ], [ 'const a = await 1', From f03bae7c82b3e7960ee995a1094dddf618b28152 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Wed, 8 Sep 2021 22:21:59 +0200 Subject: [PATCH 32/95] tools: update remark-html to v13.0.2 PR-URL: https://github.com/nodejs/node/pull/40043 Reviewed-By: Antoine du Hamel Reviewed-By: Rich Trott --- tools/doc/json.mjs | 2 +- tools/doc/package-lock.json | 14 +++++++------- tools/doc/package.json | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tools/doc/json.mjs b/tools/doc/json.mjs index 318be8fd2ffb4e..5bcfd9886587f2 100644 --- a/tools/doc/json.mjs +++ b/tools/doc/json.mjs @@ -187,7 +187,7 @@ export function jsonAPI({ filename }) { { type: 'root', children: nodes.concat(definitions) } ); }) - .use(html) + .use(html, { sanitize: false }) .processSync('').toString().trim(); if (!current.desc) delete current.desc; } diff --git a/tools/doc/package-lock.json b/tools/doc/package-lock.json index 7d60e99f088c6c..04202b1fad56c9 100644 --- a/tools/doc/package-lock.json +++ b/tools/doc/package-lock.json @@ -17,7 +17,7 @@ "rehype-stringify": "8.0.0", "remark-frontmatter": "^3.0.0", "remark-gfm": "^1.0.0", - "remark-html": "13.0.1", + "remark-html": "13.0.2", "remark-parse": "^9.0.0", "remark-rehype": "8.1.0", "to-vfile": "7.1.0", 
@@ -951,9 +951,9 @@ } }, "node_modules/remark-html": { - "version": "13.0.1", - "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-13.0.1.tgz", - "integrity": "sha512-K5KQCXWVz+harnyC+UVM/J9eJWCgjYRqFeZoZf2NgP0iFbuuw/RgMZv3MA34b/OEpGnstl3oiOUtZzD3tJ+CBw==", + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-13.0.2.tgz", + "integrity": "sha512-LhSRQ+3RKdBqB/RGesFWkNNfkGqprDUCwjq54SylfFeNyZby5kqOG8Dn/vYsRoM8htab6EWxFXCY6XIZvMoRiQ==", "dev": true, "dependencies": { "hast-util-sanitize": "^3.0.0", @@ -1992,9 +1992,9 @@ } }, "remark-html": { - "version": "13.0.1", - "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-13.0.1.tgz", - "integrity": "sha512-K5KQCXWVz+harnyC+UVM/J9eJWCgjYRqFeZoZf2NgP0iFbuuw/RgMZv3MA34b/OEpGnstl3oiOUtZzD3tJ+CBw==", + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-13.0.2.tgz", + "integrity": "sha512-LhSRQ+3RKdBqB/RGesFWkNNfkGqprDUCwjq54SylfFeNyZby5kqOG8Dn/vYsRoM8htab6EWxFXCY6XIZvMoRiQ==", "dev": true, "requires": { "hast-util-sanitize": "^3.0.0", diff --git a/tools/doc/package.json b/tools/doc/package.json index 92ac19660d5b42..b349269bd7ed1e 100644 --- a/tools/doc/package.json +++ b/tools/doc/package.json @@ -13,7 +13,7 @@ "rehype-stringify": "8.0.0", "remark-frontmatter": "^3.0.0", "remark-gfm": "^1.0.0", - "remark-html": "13.0.1", + "remark-html": "13.0.2", "remark-parse": "^9.0.0", "remark-rehype": "8.1.0", "to-vfile": "7.1.0", From 67759585a03960883b9c33b574d0d1cc5e8698b8 Mon Sep 17 00:00:00 2001 From: Jiawen Geng Date: Mon, 6 Sep 2021 02:34:08 +0000 Subject: [PATCH 33/95] deps: patch for v8 on windows MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40010 Reviewed-By: Michaël Zasso Reviewed-By: James M Snell --- .github/workflows/build-windows.yml | 6 +++++- common.gypi | 2 +- deps/v8/src/objects/fixed-array-inl.h | 2 +- deps/v8/src/objects/fixed-array.h | 6 +++--- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index 1b0b336f4b8273..d489de708d130b 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -18,7 +18,11 @@ env: jobs: build-windows: if: github.event.pull_request.draft == false - runs-on: windows-latest + strategy: + matrix: + windows: [windows-2019, windows-2022] + fail-fast: false + runs-on: ${{ matrix.windows }} steps: - uses: actions/checkout@v2 - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/common.gypi b/common.gypi index 013f24b107408f..dea0ff5be51771 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.12', + 'v8_embedder_string': '-node.13', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/objects/fixed-array-inl.h b/deps/v8/src/objects/fixed-array-inl.h index 3b450634a59ee6..61ee533273e387 100644 --- a/deps/v8/src/objects/fixed-array-inl.h +++ b/deps/v8/src/objects/fixed-array-inl.h @@ -84,7 +84,7 @@ bool FixedArray::is_the_hole(Isolate* isolate, int index) { return get(isolate, index).IsTheHole(isolate); } -#if !defined(_WIN32) || defined(_WIN64) +#if !defined(_WIN32) || (defined(_WIN64) && _MSC_VER < 1930) void FixedArray::set(int index, Smi value) { DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map()); DCHECK_LT(static_cast(index), static_cast(length())); diff --git a/deps/v8/src/objects/fixed-array.h b/deps/v8/src/objects/fixed-array.h index 23904b81968509..8d6e4b1dfb7949 100644 --- a/deps/v8/src/objects/fixed-array.h +++ b/deps/v8/src/objects/fixed-array.h @@ -134,7 +134,9 @@ class FixedArray inline bool is_the_hole(Isolate* isolate, int index); // Setter that doesn't need write barrier. -#if defined(_WIN32) && !defined(_WIN64) +#if !defined(_WIN32) || (defined(_WIN64) && _MSC_VER < 1930) + inline void set(int index, Smi value); +#else inline void set(int index, Smi value) { DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map()); DCHECK_LT(static_cast(index), static_cast(length())); @@ -142,8 +144,6 @@ class FixedArray int offset = OffsetOfElementAt(index); RELAXED_WRITE_FIELD(*this, offset, value); } -#else - inline void set(int index, Smi value); #endif // Setter with explicit barrier mode. From a71579b05e96798ca79ab3ffc5ccd0cba71a1cf6 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 6 Sep 2021 18:15:40 -0700 Subject: [PATCH 34/95] meta: add more mailmap entries for bajtos MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40023 Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Luigi Pinca --- .mailmap | 4 +++- AUTHORS | 3 +-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.mailmap b/.mailmap index c42691b0a3af33..f691ac78ae2284 100644 --- a/.mailmap +++ b/.mailmap @@ -291,7 +291,9 @@ Minqi Pan Minuk Park Minwoo Jung Minwoo Jung -Miroslav Bajtoš +Miroslav Bajtoš +Miroslav Bajtoš +Miroslav Bajtoš Mitar Milutinovic Mithun Sasidharan Myles Borins diff --git a/AUTHORS b/AUTHORS index 03a9889921cc45..a7474dd6671a3d 100644 --- a/AUTHORS +++ b/AUTHORS @@ -444,7 +444,7 @@ Ryan Graham Kelly Gerber Ryan Doenges Sean Silva -Miroslav Bajtoš +Miroslav Bajtoš Olof Johansson Sam Roberts Kevin Locke @@ -1626,7 +1626,6 @@ Pini Houri Runite618 phisixersai hsmtkk -Miroslav Bajtoš Sebastian Murphy 陈刚 Jon Moss From fc45cbe7a8bfd3324b5751d424aa340d89ffc522 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Tue, 7 Sep 2021 14:17:41 +0000 Subject: [PATCH 35/95] crypto: fix default MGF1 hash for OpenSSL 3 Refs: https://github.com/nodejs/node/pull/39999 PR-URL: https://github.com/nodejs/node/pull/40031 Reviewed-By: James M Snell Reviewed-By: Filip Skokan --- src/crypto/crypto_rsa.cc | 13 +++++++++++-- test/parallel/test-crypto-keygen.js | 22 ++++++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/src/crypto/crypto_rsa.cc b/src/crypto/crypto_rsa.cc index 1bbf9a1753e4e2..30181dece8b541 100644 --- a/src/crypto/crypto_rsa.cc +++ b/src/crypto/crypto_rsa.cc @@ -63,10 +63,19 @@ EVPKeyCtxPointer RsaKeyGenTraits::Setup(RsaKeyPairGenConfig* params) { return EVPKeyCtxPointer(); } - if 
(params->params.mgf1_md != nullptr && + // TODO(tniessen): This appears to only be necessary in OpenSSL 3, while + // OpenSSL 1.1.1 behaves as recommended by RFC 8017 and defaults the MGF1 + // hash algorithm to the RSA-PSS hashAlgorithm. Remove this code if the + // behavior of OpenSSL 3 changes. + const EVP_MD* mgf1_md = params->params.mgf1_md; + if (mgf1_md == nullptr && params->params.md != nullptr) { + mgf1_md = params->params.md; + } + + if (mgf1_md != nullptr && EVP_PKEY_CTX_set_rsa_pss_keygen_mgf1_md( ctx.get(), - params->params.mgf1_md) <= 0) { + mgf1_md) <= 0) { return EVPKeyCtxPointer(); } diff --git a/test/parallel/test-crypto-keygen.js b/test/parallel/test-crypto-keygen.js index d35eeae5b98ed5..4f598877b1b9cf 100644 --- a/test/parallel/test-crypto-keygen.js +++ b/test/parallel/test-crypto-keygen.js @@ -369,6 +369,28 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); })); } +{ + // RFC 8017, 9.1.: "Assuming that the mask generation function is based on a + // hash function, it is RECOMMENDED that the hash function be the same as the + // one that is applied to the message." + + generateKeyPair('rsa-pss', { + modulusLength: 512, + hashAlgorithm: 'sha256', + saltLength: 16 + }, common.mustSucceed((publicKey, privateKey) => { + const expectedKeyDetails = { + modulusLength: 512, + publicExponent: 65537n, + hashAlgorithm: 'sha256', + mgf1HashAlgorithm: 'sha256', + saltLength: 16 + }; + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, expectedKeyDetails); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, expectedKeyDetails); + })); +} + { const privateKeyEncoding = { type: 'pkcs8', From d657ae6f8a7966da00b068f2db349d3509f4a3b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Sun, 5 Sep 2021 10:27:41 +0000 Subject: [PATCH 36/95] crypto: fix RSA-PSS default saltLength PR-URL: https://github.com/nodejs/node/pull/39999 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Filip Skokan --- src/crypto/crypto_rsa.cc | 9 +++++-- test/parallel/test-crypto-keygen.js | 37 +++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 2 deletions(-) diff --git a/src/crypto/crypto_rsa.cc b/src/crypto/crypto_rsa.cc index 30181dece8b541..d2307c33f5de87 100644 --- a/src/crypto/crypto_rsa.cc +++ b/src/crypto/crypto_rsa.cc @@ -79,10 +79,15 @@ EVPKeyCtxPointer RsaKeyGenTraits::Setup(RsaKeyPairGenConfig* params) { return EVPKeyCtxPointer(); } - if (params->params.saltlen >= 0 && + int saltlen = params->params.saltlen; + if (saltlen < 0 && params->params.md != nullptr) { + saltlen = EVP_MD_size(params->params.md); + } + + if (saltlen >= 0 && EVP_PKEY_CTX_set_rsa_pss_keygen_saltlen( ctx.get(), - params->params.saltlen) <= 0) { + saltlen) <= 0) { return EVPKeyCtxPointer(); } } diff --git a/test/parallel/test-crypto-keygen.js b/test/parallel/test-crypto-keygen.js index 4f598877b1b9cf..2647c16a9a906c 100644 --- a/test/parallel/test-crypto-keygen.js +++ b/test/parallel/test-crypto-keygen.js @@ -391,6 +391,43 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); })); } +{ + // RFC 8017, A.2.3.: "For a given hashAlgorithm, the default value of + // saltLength is the octet length of the hash value." 
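+  // Illustration (comment added for clarity, not in the upstream test): with
+  // hashAlgorithm 'sha512' the digest size is 64 bytes, so the default
+  // saltLength asserted below is 64.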
+ + generateKeyPair('rsa-pss', { + modulusLength: 512, + hashAlgorithm: 'sha512' + }, common.mustSucceed((publicKey, privateKey) => { + const expectedKeyDetails = { + modulusLength: 512, + publicExponent: 65537n, + hashAlgorithm: 'sha512', + mgf1HashAlgorithm: 'sha512', + saltLength: 64 + }; + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, expectedKeyDetails); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, expectedKeyDetails); + })); + + // It is still possible to explicitly set saltLength to 0. + generateKeyPair('rsa-pss', { + modulusLength: 512, + hashAlgorithm: 'sha512', + saltLength: 0 + }, common.mustSucceed((publicKey, privateKey) => { + const expectedKeyDetails = { + modulusLength: 512, + publicExponent: 65537n, + hashAlgorithm: 'sha512', + mgf1HashAlgorithm: 'sha512', + saltLength: 0 + }; + assert.deepStrictEqual(publicKey.asymmetricKeyDetails, expectedKeyDetails); + assert.deepStrictEqual(privateKey.asymmetricKeyDetails, expectedKeyDetails); + })); +} + { const privateKeyEncoding = { type: 'pkcs8', From 9f3a015b6072b193ad127fef502e790f6c2d0052 Mon Sep 17 00:00:00 2001 From: Dominic Elm Date: Thu, 2 Sep 2021 15:17:42 +0200 Subject: [PATCH 37/95] src: add option to disable loading native addons PR-URL: https://github.com/nodejs/node/pull/39977 Reviewed-By: Anna Henningsen Reviewed-By: Bradley Farias Reviewed-By: Guy Bedford Reviewed-By: Michael Dawson --- doc/api/cli.md | 10 ++++ doc/api/errors.md | 9 +++ doc/api/packages.md | 20 +++++-- doc/node.1 | 5 ++ lib/internal/modules/cjs/helpers.js | 10 +++- lib/internal/modules/esm/resolve.js | 11 +++- src/env-inl.h | 5 ++ src/env.h | 1 + src/node.h | 8 ++- src/node_binding.cc | 6 ++ src/node_errors.h | 1 + src/node_options.cc | 5 ++ src/node_options.h | 1 + src/node_worker.cc | 2 + test/addons/no-addons/binding.gyp | 9 +++ test/addons/no-addons/test-worker.js | 59 +++++++++++++++++++ test/addons/no-addons/test.js | 43 ++++++++++++++ test/es-module/test-esm-no-addons.mjs | 27 +++++++++ .../loader-with-custom-condition.mjs | 10 +++- .../node_modules/pkgexports/addons-entry.js | 3 + .../pkgexports/no-addons-entry.js | 3 + .../node_modules/pkgexports/package.json | 4 ++ .../test-no-addons-resolution-condition.js | 29 +++++++++ 23 files changed, 272 insertions(+), 9 deletions(-) create mode 100644 test/addons/no-addons/binding.gyp create mode 100644 test/addons/no-addons/test-worker.js create mode 100644 test/addons/no-addons/test.js create mode 100644 test/es-module/test-esm-no-addons.mjs create mode 100644 test/fixtures/node_modules/pkgexports/addons-entry.js create mode 100644 test/fixtures/node_modules/pkgexports/no-addons-entry.js create mode 100644 test/parallel/test-no-addons-resolution-condition.js diff --git a/doc/api/cli.md b/doc/api/cli.md index e3294958f8c1d8..7c0d5e88d4d640 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -595,6 +595,15 @@ added: v7.10.0 This option is a no-op. It is kept for compatibility. +### `--no-addons` + + +Disable the `node-addons` exports condition as well as disable loading +native addons. When `--no-addons` is specified, calling `process.dlopen` or +requiring a native C++ addon will fail and throw an exception. + ### `--no-deprecation` + +Loading native addons has been disabled using [`--no-addons`][]. + ### `ERR_DLOPEN_FAILED` + +Type: Documentation-only (supports [`--pending-deprecation`][]) + +The remapping of specifiers ending in `"/"` like `import 'pkg/x/'` is deprecated +for package `"exports"` and `"imports"` pattern resolutions. 
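As a rough sketch of the two forms (the package name `pkg` and the `"exports"` mapping `"./x/*": "./lib/x/*.js"` are hypothetical, used only for illustration):

```js
// Deprecated: the specifier ends in "/" and is matched through an "exports"/"imports"
// pattern, which emits DEP0155 when --pending-deprecation is set.
import 'pkg/x/';

// Preferred: resolve a concrete file covered by the "*" pattern instead.
import 'pkg/x/index.js';
```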
+ [Legacy URL API]: url.md#url_legacy_url_api [NIST SP 800-38D]: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38d.pdf [RFC 6066]: https://tools.ietf.org/html/rfc6066#section-3 diff --git a/doc/api/esm.md b/doc/api/esm.md index eb9811ada8b1a5..54892bf85ca7d0 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -1190,6 +1190,8 @@ _isImports_, _conditions_) > _expansionKey_ up to but excluding the first _"*"_ character. > 1. If _patternBase_ is not **null** and _matchKey_ starts with but is not > equal to _patternBase_, then +> 1. If _matchKey_ ends with _"/"_, throw an _Invalid Module Specifier_ +> error. > 1. Let _patternTrailer_ be the substring of _expansionKey_ from the > index after the first _"*"_ character. > 1. If _patternTrailer_ has zero length, or if _matchKey_ ends with diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js index c0f33f38e6810f..a8ce281af6eb96 100644 --- a/lib/internal/modules/esm/resolve.js +++ b/lib/internal/modules/esm/resolve.js @@ -40,6 +40,7 @@ const { sep, relative, resolve } = require('path'); const preserveSymlinks = getOptionValue('--preserve-symlinks'); const preserveSymlinksMain = getOptionValue('--preserve-symlinks-main'); const typeFlag = getOptionValue('--input-type'); +const pendingDeprecation = getOptionValue('--pending-deprecation'); const { URL, pathToFileURL, fileURLToPath } = require('internal/url'); const { ERR_INPUT_TYPE_NOT_ALLOWED, @@ -106,6 +107,22 @@ function emitFolderMapDeprecation(match, pjsonUrl, isExports, base) { ); } +function emitTrailingSlashPatternDeprecation(match, pjsonUrl, isExports, base) { + if (!pendingDeprecation) return; + const pjsonPath = fileURLToPath(pjsonUrl); + if (emittedPackageWarnings.has(pjsonPath + '|' + match)) + return; + emittedPackageWarnings.add(pjsonPath + '|' + match); + process.emitWarning( + `Use of deprecated trailing slash pattern mapping "${match}" in the ${ + isExports ? '"exports"' : '"imports"'} field module resolution of the ` + + `package at ${pjsonPath}${base ? ` imported from ${fileURLToPath(base)}` : + ''}. 
Mapping specifiers ending in "/" is no longer supported.`, + 'DeprecationWarning', + 'DEP0155' + ); +} + /** * @param {URL} url * @param {URL} packageJSONUrl @@ -639,6 +656,9 @@ function packageExportsResolve( if (patternIndex !== -1 && StringPrototypeStartsWith(packageSubpath, StringPrototypeSlice(key, 0, patternIndex))) { + if (StringPrototypeEndsWith(packageSubpath, '/')) + emitTrailingSlashPatternDeprecation(packageSubpath, packageJSONUrl, + true, base); const patternTrailer = StringPrototypeSlice(key, patternIndex + 1); if (packageSubpath.length >= key.length && StringPrototypeEndsWith(packageSubpath, patternTrailer) && diff --git a/test/es-module/test-esm-exports-deprecations.mjs b/test/es-module/test-esm-exports-deprecations.mjs index 2dd2756e2ee844..8c7a07b0204b9a 100644 --- a/test/es-module/test-esm-exports-deprecations.mjs +++ b/test/es-module/test-esm-exports-deprecations.mjs @@ -1,3 +1,4 @@ +// Flags: --pending-deprecation import { mustCall } from '../common/index.mjs'; import assert from 'assert'; @@ -5,7 +6,10 @@ let curWarning = 0; const expectedWarnings = [ '"./sub/"', '"./fallbackdir/"', + '"./trailing-pattern-slash/"', '"./subpath/"', + '"./subpath/dir1/"', + '"./subpath/dir2/"', 'no_exports', 'default_index', ]; diff --git a/test/es-module/test-esm-exports.mjs b/test/es-module/test-esm-exports.mjs index 0bf361d4863fff..11323351b6512f 100644 --- a/test/es-module/test-esm-exports.mjs +++ b/test/es-module/test-esm-exports.mjs @@ -41,13 +41,19 @@ import fromInside from '../fixtures/node_modules/pkgexports/lib/hole.js'; ['pkgexports/dir2/dir2/trailer', { default: 'index' }], ['pkgexports/a/dir1/dir1', { default: 'main' }], ['pkgexports/a/b/dir1/dir1', { default: 'main' }], + + // Deprecated: + ['pkgexports/trailing-pattern-slash/', + { default: 'trailing-pattern-slash' }], ]); if (isRequire) { validSpecifiers.set('pkgexports/subpath/file', { default: 'file' }); validSpecifiers.set('pkgexports/subpath/dir1', { default: 'main' }); + // Deprecated: validSpecifiers.set('pkgexports/subpath/dir1/', { default: 'main' }); validSpecifiers.set('pkgexports/subpath/dir2', { default: 'index' }); + // Deprecated: validSpecifiers.set('pkgexports/subpath/dir2/', { default: 'index' }); } else { // No exports or main field diff --git a/test/es-module/test-esm-local-deprecations.mjs b/test/es-module/test-esm-local-deprecations.mjs index a1945f66f3422f..8d946b6650ed3b 100644 --- a/test/es-module/test-esm-local-deprecations.mjs +++ b/test/es-module/test-esm-local-deprecations.mjs @@ -1,3 +1,5 @@ +// Flags: --pending-deprecation + import '../common/index.mjs'; import assert from 'assert'; import fixtures from '../common/fixtures.js'; @@ -9,10 +11,14 @@ const selfDeprecatedFolders = const deprecatedFoldersIgnore = fixtures.path('/es-modules/deprecated-folders-ignore/main.js'); +const deprecatedTrailingSlashPattern = + fixtures.path('/es-modules/pattern-trailing-slash.mjs'); + const expectedWarnings = [ '"./" in the "exports" field', '"#self/" in the "imports" field', '"./folder/" in the "exports" field', + '"./trailing-pattern-slash/" in the "exports" field', ]; process.addListener('warning', (warning) => { @@ -28,5 +34,6 @@ process.on('exit', () => { (async () => { await import(pathToFileURL(selfDeprecatedFolders)); await import(pathToFileURL(deprecatedFoldersIgnore)); + await import(pathToFileURL(deprecatedTrailingSlashPattern)); })() .catch((err) => console.error(err)); diff --git a/test/fixtures/es-modules/pattern-trailing-slash.mjs b/test/fixtures/es-modules/pattern-trailing-slash.mjs new file 
mode 100644 index 00000000000000..e289305ee026b9 --- /dev/null +++ b/test/fixtures/es-modules/pattern-trailing-slash.mjs @@ -0,0 +1 @@ +import 'pkgexports/trailing-pattern-slash/'; diff --git a/test/fixtures/node_modules/pkgexports/package.json b/test/fixtures/node_modules/pkgexports/package.json index 7f8f994ac398bd..fe46111f793314 100644 --- a/test/fixtures/node_modules/pkgexports/package.json +++ b/test/fixtures/node_modules/pkgexports/package.json @@ -61,6 +61,7 @@ "./subpath/": "./subpath/", "./subpath/sub-*": "./subpath/dir1/*.js", "./subpath/sub-*.js": "./subpath/dir1/*.js", - "./features/*": "./subpath/*/*.js" + "./features/*": "./subpath/*/*.js", + "./trailing-pattern-slash*": "./trailing-pattern-slash*index.js" } } diff --git a/test/fixtures/node_modules/pkgexports/trailing-pattern-slash/index.js b/test/fixtures/node_modules/pkgexports/trailing-pattern-slash/index.js new file mode 100644 index 00000000000000..613bddbb6a44dd --- /dev/null +++ b/test/fixtures/node_modules/pkgexports/trailing-pattern-slash/index.js @@ -0,0 +1 @@ +module.exports = 'trailing-pattern-slash'; From ec6de1195a89c4f3d6bc5961406349351742bcbe Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 14 Sep 2021 13:42:13 -0700 Subject: [PATCH 53/95] tools: update ansi-regex in lint-md rollup MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update ansi-regex from 6.0.0 to 6.0.1. Refs: https://snyk.io/vuln/SNYK-JS-ANSIREGEX-1583908 PR-URL: https://github.com/nodejs/node/pull/40112 Reviewed-By: Antoine du Hamel Reviewed-By: Michaël Zasso Reviewed-By: Qingyu Deng Reviewed-By: Tobias Nießen --- tools/lint-md.mjs | 2 +- tools/node-lint-md-cli-rollup/package-lock.json | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tools/lint-md.mjs b/tools/lint-md.mjs index 8f36b396fbad20..f5e5a47a30e98b 100644 --- a/tools/lint-md.mjs +++ b/tools/lint-md.mjs @@ -29678,7 +29678,7 @@ const supportsColor = { function ansiRegex({onlyFirst = false} = {}) { const pattern = [ - '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', + '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' ].join('|'); diff --git a/tools/node-lint-md-cli-rollup/package-lock.json b/tools/node-lint-md-cli-rollup/package-lock.json index c1c0b9e58b60f9..fd8ca9c653065a 100644 --- a/tools/node-lint-md-cli-rollup/package-lock.json +++ b/tools/node-lint-md-cli-rollup/package-lock.json @@ -282,9 +282,9 @@ "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" }, "node_modules/ansi-regex": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.0.tgz", - "integrity": "sha512-tAaOSrWCHF+1Ear1Z4wnJCXA9GGox4K6Ic85a5qalES2aeEwQGr7UC93mwef49536PkCYjzkp0zIxfFvexJ6zQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", "engines": { "node": ">=12" }, @@ -3696,9 +3696,9 @@ "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" }, "ansi-regex": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.0.tgz", - "integrity": 
"sha512-tAaOSrWCHF+1Ear1Z4wnJCXA9GGox4K6Ic85a5qalES2aeEwQGr7UC93mwef49536PkCYjzkp0zIxfFvexJ6zQ==" + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==" }, "ansi-styles": { "version": "4.3.0", From 1eca9bc5b21259cc3394c4b80888c493d636e170 Mon Sep 17 00:00:00 2001 From: Guy Bedford Date: Wed, 8 Sep 2021 12:14:03 -0700 Subject: [PATCH 54/95] module: support pattern trailers for imports field PR-URL: https://github.com/nodejs/node/pull/40041 Reviewed-By: James M Snell Reviewed-By: Geoffrey Booth --- lib/internal/modules/esm/resolve.js | 38 ++++++++++++------- test/es-module/test-esm-imports.mjs | 2 + .../es-modules/pkgimports/package.json | 1 + 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js index a8ce281af6eb96..1b84fe9f87b773 100644 --- a/lib/internal/modules/esm/resolve.js +++ b/lib/internal/modules/esm/resolve.js @@ -22,7 +22,6 @@ const { StringPrototypeSlice, StringPrototypeSplit, StringPrototypeStartsWith, - StringPrototypeSubstr, } = primordials; const internalFS = require('internal/fs/utils'); const { NativeModule } = require('internal/bootstrap/loaders'); @@ -724,7 +723,9 @@ function packageImportsResolve(name, base, conditions) { packageJSONUrl = pathToFileURL(packageConfig.pjsonPath); const imports = packageConfig.imports; if (imports) { - if (ObjectPrototypeHasOwnProperty(imports, name)) { + if (ObjectPrototypeHasOwnProperty(imports, name) && + !StringPrototypeIncludes(name, '*') && + !StringPrototypeEndsWith(name, '/')) { const resolved = resolvePackageTarget( packageJSONUrl, imports[name], '', name, base, false, true, conditions ); @@ -732,30 +733,39 @@ function packageImportsResolve(name, base, conditions) { return { resolved, exact: true }; } else { let bestMatch = ''; + let bestMatchSubpath; const keys = ObjectGetOwnPropertyNames(imports); for (let i = 0; i < keys.length; i++) { const key = keys[i]; - if (key[key.length - 1] === '*' && + const patternIndex = StringPrototypeIndexOf(key, '*'); + if (patternIndex !== -1 && StringPrototypeStartsWith(name, - StringPrototypeSlice(key, 0, -1)) && - name.length >= key.length && - key.length > bestMatch.length) { - bestMatch = key; + StringPrototypeSlice(key, 0, + patternIndex))) { + const patternTrailer = StringPrototypeSlice(key, patternIndex + 1); + if (name.length >= key.length && + StringPrototypeEndsWith(name, patternTrailer) && + patternKeyCompare(bestMatch, key) === 1 && + StringPrototypeLastIndexOf(key, '*') === patternIndex) { + bestMatch = key; + bestMatchSubpath = StringPrototypeSlice( + name, patternIndex, name.length - patternTrailer.length); + } } else if (key[key.length - 1] === '/' && StringPrototypeStartsWith(name, key) && - key.length > bestMatch.length) { + patternKeyCompare(bestMatch, key) === 1) { bestMatch = key; + bestMatchSubpath = StringPrototypeSlice(name, key.length); } } if (bestMatch) { const target = imports[bestMatch]; - const pattern = bestMatch[bestMatch.length - 1] === '*'; - const subpath = StringPrototypeSubstr(name, bestMatch.length - - (pattern ? 
1 : 0)); - const resolved = resolvePackageTarget( - packageJSONUrl, target, subpath, bestMatch, base, pattern, true, - conditions); + const pattern = StringPrototypeIncludes(bestMatch, '*'); + const resolved = resolvePackageTarget(packageJSONUrl, target, + bestMatchSubpath, bestMatch, + base, pattern, true, + conditions); if (resolved !== null) { if (!pattern) emitFolderMapDeprecation(bestMatch, packageJSONUrl, false, base); diff --git a/test/es-module/test-esm-imports.mjs b/test/es-module/test-esm-imports.mjs index 694496a2ff2c93..577e33b60d08ec 100644 --- a/test/es-module/test-esm-imports.mjs +++ b/test/es-module/test-esm-imports.mjs @@ -20,6 +20,8 @@ const { requireImport, importImport } = importer; ['#external', { default: 'asdf' }], // External subpath imports ['#external/subpath/asdf.js', { default: 'asdf' }], + // Trailing pattern imports + ['#subpath/asdf.asdf', { default: 'test' }], ]); for (const [validSpecifier, expected] of internalImports) { diff --git a/test/fixtures/es-modules/pkgimports/package.json b/test/fixtures/es-modules/pkgimports/package.json index a2224b39ddd2ac..299ce9c197b554 100644 --- a/test/fixtures/es-modules/pkgimports/package.json +++ b/test/fixtures/es-modules/pkgimports/package.json @@ -6,6 +6,7 @@ "require": "./requirebranch.js" }, "#subpath/*": "./sub/*", + "#subpath/*.asdf": "./test.js", "#external": "pkgexports/valid-cjs", "#external/subpath/*": "pkgexports/sub/*", "#external/invalidsubpath/": "pkgexports/sub", From f666f5a8d12df9b8eead5ac5063efb8faf5a0109 Mon Sep 17 00:00:00 2001 From: wwwzbwcom Date: Wed, 15 Sep 2021 14:36:42 +0800 Subject: [PATCH 55/95] events: fix duplicate require which cause performance penalty PR-URL: https://github.com/nodejs/node/pull/39892 Reviewed-By: Luigi Pinca Reviewed-By: Nitzan Uziely Reviewed-By: James M Snell Reviewed-By: Ruben Bridgewater Reviewed-By: Minwoo Jung Reviewed-By: Colin Ihrig Reviewed-By: Darshan Sen Reviewed-By: Zijian Liu Reviewed-By: Qingyu Deng --- lib/events.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/lib/events.js b/lib/events.js index 557461084631f3..ef8090e57778e7 100644 --- a/lib/events.js +++ b/lib/events.js @@ -50,6 +50,7 @@ const { SymbolAsyncIterator, } = primordials; const kRejection = SymbolFor('nodejs.rejection'); +const { inspect } = require('internal/util/inspect'); let spliceOne; @@ -63,10 +64,6 @@ const { }, } = require('internal/errors'); -const { - inspect -} = require('internal/util/inspect'); - const { validateAbortSignal, validateBoolean, @@ -372,7 +369,6 @@ EventEmitter.prototype.emit = function emit(type, ...args) { } let stringifiedEr; - const { inspect } = require('internal/util/inspect'); try { stringifiedEr = inspect(er); } catch { From 8aad81dd996231a65f2b4bdb54934d0e785c5cda Mon Sep 17 00:00:00 2001 From: FrankQiu Date: Fri, 3 Sep 2021 23:33:15 +0800 Subject: [PATCH 56/95] doc: add full list of subsystems PR-URL: https://github.com/nodejs/node/pull/39971 Reviewed-By: Antoine du Hamel Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Qingyu Deng --- doc/guides/contributing/pull-requests.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/guides/contributing/pull-requests.md b/doc/guides/contributing/pull-requests.md index 763b05491592d1..29c0554d35fd7f 100644 --- a/doc/guides/contributing/pull-requests.md +++ b/doc/guides/contributing/pull-requests.md @@ -579,6 +579,8 @@ If you want to know more about the code review and the landing process, see the * `test` * `tools` +You can find the full list of supported subsystems in 
the +[nodejs/core-validate-commit][] repository. More than one subsystem may be valid for any particular issue or pull request. [Building guide]: ../../../BUILDING.md @@ -591,5 +593,6 @@ More than one subsystem may be valid for any particular issue or pull request. [guide for writing tests in Node.js]: ../writing-tests.md [hiding-a-comment]: https://help.github.com/articles/managing-disruptive-comments/#hiding-a-comment [https://ci.nodejs.org/]: https://ci.nodejs.org/ +[nodejs/core-validate-commit]: https://github.com/nodejs/core-validate-commit/blob/main/lib/rules/subsystem.js [pull request template]: https://raw.githubusercontent.com/nodejs/node/HEAD/.github/PULL_REQUEST_TEMPLATE.md [running tests]: ../../../BUILDING.md#running-tests From 0df47d5843ac44b8cccb55ed5b871c5d7159c028 Mon Sep 17 00:00:00 2001 From: npm team Date: Thu, 9 Sep 2021 20:01:11 +0000 Subject: [PATCH 57/95] deps: upgrade npm to 7.23.0 PR-URL: https://github.com/nodejs/node/pull/40055 Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Beth Griggs --- .../docs/content/commands/npm-deprecate.md | 2 +- deps/npm/docs/content/using-npm/workspaces.md | 10 + .../docs/output/commands/npm-deprecate.html | 2 +- deps/npm/docs/output/commands/npm-ls.html | 2 +- deps/npm/docs/output/commands/npm.html | 2 +- .../npm/docs/output/using-npm/workspaces.html | 7 +- deps/npm/lib/install.js | 22 +- deps/npm/lib/utils/error-message.js | 2 +- deps/npm/man/man1/npm-access.1 | 2 +- deps/npm/man/man1/npm-adduser.1 | 2 +- deps/npm/man/man1/npm-audit.1 | 2 +- deps/npm/man/man1/npm-bin.1 | 2 +- deps/npm/man/man1/npm-bugs.1 | 2 +- deps/npm/man/man1/npm-cache.1 | 2 +- deps/npm/man/man1/npm-ci.1 | 2 +- deps/npm/man/man1/npm-completion.1 | 2 +- deps/npm/man/man1/npm-config.1 | 2 +- deps/npm/man/man1/npm-dedupe.1 | 2 +- deps/npm/man/man1/npm-deprecate.1 | 4 +- deps/npm/man/man1/npm-diff.1 | 2 +- deps/npm/man/man1/npm-dist-tag.1 | 2 +- deps/npm/man/man1/npm-docs.1 | 2 +- deps/npm/man/man1/npm-doctor.1 | 2 +- deps/npm/man/man1/npm-edit.1 | 2 +- deps/npm/man/man1/npm-exec.1 | 2 +- deps/npm/man/man1/npm-explain.1 | 2 +- deps/npm/man/man1/npm-explore.1 | 2 +- deps/npm/man/man1/npm-find-dupes.1 | 2 +- deps/npm/man/man1/npm-fund.1 | 2 +- deps/npm/man/man1/npm-help-search.1 | 2 +- deps/npm/man/man1/npm-help.1 | 2 +- deps/npm/man/man1/npm-hook.1 | 2 +- deps/npm/man/man1/npm-init.1 | 2 +- deps/npm/man/man1/npm-install-ci-test.1 | 2 +- deps/npm/man/man1/npm-install-test.1 | 2 +- deps/npm/man/man1/npm-install.1 | 2 +- deps/npm/man/man1/npm-link.1 | 2 +- deps/npm/man/man1/npm-logout.1 | 2 +- deps/npm/man/man1/npm-ls.1 | 4 +- deps/npm/man/man1/npm-org.1 | 2 +- deps/npm/man/man1/npm-outdated.1 | 2 +- deps/npm/man/man1/npm-owner.1 | 2 +- deps/npm/man/man1/npm-pack.1 | 2 +- deps/npm/man/man1/npm-ping.1 | 2 +- deps/npm/man/man1/npm-pkg.1 | 2 +- deps/npm/man/man1/npm-prefix.1 | 2 +- deps/npm/man/man1/npm-profile.1 | 2 +- deps/npm/man/man1/npm-prune.1 | 2 +- deps/npm/man/man1/npm-publish.1 | 2 +- deps/npm/man/man1/npm-rebuild.1 | 2 +- deps/npm/man/man1/npm-repo.1 | 2 +- deps/npm/man/man1/npm-restart.1 | 2 +- deps/npm/man/man1/npm-root.1 | 2 +- deps/npm/man/man1/npm-run-script.1 | 2 +- deps/npm/man/man1/npm-search.1 | 2 +- deps/npm/man/man1/npm-set-script.1 | 2 +- deps/npm/man/man1/npm-shrinkwrap.1 | 2 +- deps/npm/man/man1/npm-star.1 | 2 +- deps/npm/man/man1/npm-stars.1 | 2 +- deps/npm/man/man1/npm-start.1 | 2 +- deps/npm/man/man1/npm-stop.1 | 2 +- deps/npm/man/man1/npm-team.1 | 2 +- deps/npm/man/man1/npm-test.1 | 2 +- deps/npm/man/man1/npm-token.1 | 2 +- 
deps/npm/man/man1/npm-uninstall.1 | 2 +- deps/npm/man/man1/npm-unpublish.1 | 2 +- deps/npm/man/man1/npm-unstar.1 | 2 +- deps/npm/man/man1/npm-update.1 | 2 +- deps/npm/man/man1/npm-version.1 | 2 +- deps/npm/man/man1/npm-view.1 | 2 +- deps/npm/man/man1/npm-whoami.1 | 2 +- deps/npm/man/man1/npm.1 | 4 +- deps/npm/man/man1/npx.1 | 2 +- deps/npm/man/man5/folders.5 | 2 +- deps/npm/man/man5/install.5 | 2 +- deps/npm/man/man5/npm-shrinkwrap-json.5 | 2 +- deps/npm/man/man5/npmrc.5 | 2 +- deps/npm/man/man5/package-json.5 | 2 +- deps/npm/man/man5/package-lock-json.5 | 2 +- deps/npm/man/man7/config.7 | 2 +- deps/npm/man/man7/developers.7 | 2 +- deps/npm/man/man7/orgs.7 | 2 +- deps/npm/man/man7/registry.7 | 2 +- deps/npm/man/man7/removal.7 | 2 +- deps/npm/man/man7/scope.7 | 2 +- deps/npm/man/man7/scripts.7 | 2 +- deps/npm/man/man7/workspaces.7 | 13 +- .../@npmcli/arborist/bin/actual.js | 6 +- .../@npmcli/arborist/bin/audit.js | 18 +- .../@npmcli/arborist/bin/dedupe.js | 9 +- .../@npmcli/arborist/bin/funding.js | 6 +- .../@npmcli/arborist/bin/ideal.js | 3 +- .../@npmcli/arborist/bin/lib/logging.js | 7 +- .../@npmcli/arborist/bin/lib/options.js | 26 +- .../@npmcli/arborist/bin/lib/timers.js | 9 +- .../@npmcli/arborist/bin/license.js | 14 +- .../@npmcli/arborist/bin/prune.js | 9 +- .../@npmcli/arborist/bin/reify.js | 9 +- .../@npmcli/arborist/bin/virtual.js | 6 +- .../@npmcli/arborist/lib/add-rm-pkg-deps.js | 39 +- .../@npmcli/arborist/lib/arborist/audit.js | 3 +- .../arborist/lib/arborist/build-ideal-tree.js | 197 +++-- .../@npmcli/arborist/lib/arborist/deduper.js | 3 +- .../@npmcli/arborist/lib/arborist/index.js | 12 +- .../arborist/lib/arborist/load-actual.js | 41 +- .../arborist/lib/arborist/load-virtual.js | 52 +- .../arborist/lib/arborist/load-workspaces.js | 6 +- .../@npmcli/arborist/lib/arborist/rebuild.js | 47 +- .../@npmcli/arborist/lib/arborist/reify.js | 229 +++-- .../@npmcli/arborist/lib/audit-report.js | 64 +- .../@npmcli/arborist/lib/calc-dep-flags.js | 27 +- .../@npmcli/arborist/lib/can-place-dep.js | 84 +- .../arborist/lib/case-insensitive-map.js | 6 +- .../arborist/lib/consistent-resolve.js | 3 +- .../arborist/lib/deepest-nesting-target.js | 6 +- .../@npmcli/arborist/lib/dep-valid.js | 12 +- .../node_modules/@npmcli/arborist/lib/diff.js | 96 ++- .../node_modules/@npmcli/arborist/lib/edge.js | 39 +- .../@npmcli/arborist/lib/gather-dep-set.js | 3 +- .../@npmcli/arborist/lib/inventory.js | 18 +- .../node_modules/@npmcli/arborist/lib/link.js | 23 +- .../node_modules/@npmcli/arborist/lib/node.js | 329 +++++--- .../@npmcli/arborist/lib/optional-set.js | 6 +- .../@npmcli/arborist/lib/peer-entry-sets.js | 15 +- .../@npmcli/arborist/lib/place-dep.js | 85 +- .../@npmcli/arborist/lib/printable.js | 62 +- .../@npmcli/arborist/lib/realpath.js | 18 +- .../@npmcli/arborist/lib/shrinkwrap.js | 251 ++++-- .../@npmcli/arborist/lib/signal-handling.js | 9 +- .../@npmcli/arborist/lib/spec-from-lock.js | 11 +- .../@npmcli/arborist/lib/tracker.js | 42 +- .../@npmcli/arborist/lib/tree-check.js | 18 +- .../@npmcli/arborist/lib/version-from-tgz.js | 6 +- .../node_modules/@npmcli/arborist/lib/vuln.js | 44 +- .../@npmcli/arborist/lib/yarn-lock.js | 42 +- .../@npmcli/arborist/package.json | 17 +- .../@npmcli/config/lib/set-envs.js | 2 + .../node_modules/@npmcli/config/package.json | 2 +- .../node_modules/are-we-there-yet/CHANGES.md | 37 - .../npm/node_modules/are-we-there-yet/LICENSE | 5 - .../node_modules/are-we-there-yet/LICENSE.md | 18 + .../are-we-there-yet/{ => lib}/index.js | 0 .../{ => lib}/tracker-base.js | 0 
.../{ => lib}/tracker-group.js | 19 +- .../{ => lib}/tracker-stream.js | 0 .../are-we-there-yet/{ => lib}/tracker.js | 4 +- .../are-we-there-yet/package.json | 52 +- deps/npm/node_modules/isarray/Makefile | 6 - deps/npm/node_modules/isarray/component.json | 19 - deps/npm/node_modules/isarray/index.js | 5 - deps/npm/node_modules/isarray/package.json | 45 - deps/npm/node_modules/isarray/test.js | 20 - .../minipass-fetch/lib/request.js | 2 +- .../node_modules/minipass-fetch/package.json | 5 +- deps/npm/node_modules/npmlog/log.js | 102 ++- .../node_modules/are-we-there-yet/LICENSE.md | 18 + .../are-we-there-yet/lib/index.js | 4 + .../are-we-there-yet/lib/tracker-base.js | 11 + .../are-we-there-yet/lib/tracker-group.js | 116 +++ .../are-we-there-yet/lib/tracker-stream.js | 36 + .../are-we-there-yet/lib/tracker.js | 32 + .../are-we-there-yet/package.json | 53 ++ deps/npm/node_modules/npmlog/package.json | 4 +- .../process-nextick-args/index.js | 45 - .../process-nextick-args/license.md | 19 - .../process-nextick-args/package.json | 25 - .../process-nextick-args/readme.md | 18 - .../read-package-json/package.json | 10 +- .../read-package-json/read-json.js | 51 +- .../doc/wg-meetings/2015-01-30.md | 60 -- .../readable-stream/duplex-browser.js | 1 - .../node_modules/readable-stream/duplex.js | 1 - .../readable-stream/errors-browser.js | 127 +++ .../node_modules/readable-stream/errors.js | 116 +++ .../readable-stream/experimentalWarning.js | 17 + .../readable-stream/lib/_stream_duplex.js | 96 ++- .../lib/_stream_passthrough.js | 10 +- .../readable-stream/lib/_stream_readable.js | 787 ++++++++++-------- .../readable-stream/lib/_stream_transform.js | 71 +- .../readable-stream/lib/_stream_writable.js | 390 ++++----- .../lib/internal/streams/BufferList.js | 79 -- .../lib/internal/streams/async_iterator.js | 207 +++++ .../lib/internal/streams/buffer_list.js | 210 +++++ .../lib/internal/streams/destroy.js | 65 +- .../lib/internal/streams/end-of-stream.js | 104 +++ .../lib/internal/streams/from-browser.js | 3 + .../lib/internal/streams/from.js | 64 ++ .../lib/internal/streams/pipeline.js | 97 +++ .../lib/internal/streams/state.js | 27 + .../node_modules/readable-stream/package.json | 54 +- .../readable-stream/passthrough.js | 1 - .../readable-stream/readable-browser.js | 2 + .../node_modules/readable-stream/readable.js | 13 +- .../node_modules/readable-stream/transform.js | 1 - .../readable-stream/writable-browser.js | 1 - .../node_modules/readable-stream/writable.js | 8 - deps/npm/node_modules/safe-buffer/index.js | 3 + .../npm/node_modules/safe-buffer/package.json | 22 +- .../node_modules/string_decoder/package.json | 7 +- deps/npm/package.json | 12 +- .../test/lib/utils/error-message.js.test.cjs | 48 +- deps/npm/test/lib/install.js | 140 ++++ deps/npm/test/lib/utils/error-message.js | 8 + 203 files changed, 3991 insertions(+), 1957 deletions(-) delete mode 100644 deps/npm/node_modules/are-we-there-yet/CHANGES.md delete mode 100644 deps/npm/node_modules/are-we-there-yet/LICENSE create mode 100644 deps/npm/node_modules/are-we-there-yet/LICENSE.md rename deps/npm/node_modules/are-we-there-yet/{ => lib}/index.js (100%) rename deps/npm/node_modules/are-we-there-yet/{ => lib}/tracker-base.js (100%) rename deps/npm/node_modules/are-we-there-yet/{ => lib}/tracker-group.js (88%) rename deps/npm/node_modules/are-we-there-yet/{ => lib}/tracker-stream.js (100%) rename deps/npm/node_modules/are-we-there-yet/{ => lib}/tracker.js (90%) delete mode 100644 deps/npm/node_modules/isarray/Makefile delete mode 100644 
deps/npm/node_modules/isarray/component.json delete mode 100644 deps/npm/node_modules/isarray/index.js delete mode 100644 deps/npm/node_modules/isarray/package.json delete mode 100644 deps/npm/node_modules/isarray/test.js create mode 100644 deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/LICENSE.md create mode 100644 deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/index.js create mode 100644 deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-base.js create mode 100644 deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-group.js create mode 100644 deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-stream.js create mode 100644 deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker.js create mode 100644 deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/package.json delete mode 100644 deps/npm/node_modules/process-nextick-args/index.js delete mode 100644 deps/npm/node_modules/process-nextick-args/license.md delete mode 100644 deps/npm/node_modules/process-nextick-args/package.json delete mode 100644 deps/npm/node_modules/process-nextick-args/readme.md delete mode 100644 deps/npm/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md delete mode 100644 deps/npm/node_modules/readable-stream/duplex-browser.js delete mode 100644 deps/npm/node_modules/readable-stream/duplex.js create mode 100644 deps/npm/node_modules/readable-stream/errors-browser.js create mode 100644 deps/npm/node_modules/readable-stream/errors.js create mode 100644 deps/npm/node_modules/readable-stream/experimentalWarning.js delete mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/BufferList.js create mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/async_iterator.js create mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/buffer_list.js create mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/end-of-stream.js create mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/from-browser.js create mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/from.js create mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/pipeline.js create mode 100644 deps/npm/node_modules/readable-stream/lib/internal/streams/state.js delete mode 100644 deps/npm/node_modules/readable-stream/passthrough.js delete mode 100644 deps/npm/node_modules/readable-stream/transform.js delete mode 100644 deps/npm/node_modules/readable-stream/writable-browser.js delete mode 100644 deps/npm/node_modules/readable-stream/writable.js diff --git a/deps/npm/docs/content/commands/npm-deprecate.md b/deps/npm/docs/content/commands/npm-deprecate.md index 4888e42e8ba867..438a54ec6e4f36 100644 --- a/deps/npm/docs/content/commands/npm-deprecate.md +++ b/deps/npm/docs/content/commands/npm-deprecate.md @@ -76,4 +76,4 @@ password, npm will prompt on the command line for one. * [npm publish](/commands/npm-publish) * [npm registry](/using-npm/registry) * [npm owner](/commands/npm-owner) -* [npm owner](/commands/npm-adduser) +* [npm adduser](/commands/npm-adduser) diff --git a/deps/npm/docs/content/using-npm/workspaces.md b/deps/npm/docs/content/using-npm/workspaces.md index 7cc125b3c7a7cc..ae834c0cc7e225 100644 --- a/deps/npm/docs/content/using-npm/workspaces.md +++ b/deps/npm/docs/content/using-npm/workspaces.md @@ -176,6 +176,16 @@ npm run test --workspaces Will run the `test` script in both `./packages/a` and `./packages/b`. 
+### Ignoring missing scripts + +It is not required for all of the workspaces to implement scripts run with the `npm run` command. + +By running the command with the `--if-present` flag, npm will ignore workspaces missing target script. + +``` +npm run test --workspaces --if-present +``` + ### See also * [npm install](/commands/npm-install) diff --git a/deps/npm/docs/output/commands/npm-deprecate.html b/deps/npm/docs/output/commands/npm-deprecate.html index 9544eb5b95efdb..5ed7165983bde9 100644 --- a/deps/npm/docs/output/commands/npm-deprecate.html +++ b/deps/npm/docs/output/commands/npm-deprecate.html @@ -194,7 +194,7 @@
[Regenerated HTML docs output (markup not reproduced here): commands/npm-deprecate.html corrects the "See Also" link text from "npm owner" to "npm adduser"; commands/npm-ls.html and commands/npm.html bump the displayed npm version from 7.21.1 to 7.23.0; using-npm/workspaces.html adds the new "Ignoring missing scripts" (--if-present) section, mirroring the workspaces.md change above.]
    • diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js index 6611763978e611..1589ff589c38e2 100644 --- a/deps/npm/lib/install.js +++ b/deps/npm/lib/install.js @@ -8,6 +8,8 @@ const log = require('npmlog') const { resolve, join } = require('path') const Arborist = require('@npmcli/arborist') const runScript = require('@npmcli/run-script') +const pacote = require('pacote') +const checks = require('npm-install-checks') const ArboristWorkspaceCmd = require('./workspaces/arborist-cmd.js') class Install extends ArboristWorkspaceCmd { @@ -126,6 +128,23 @@ class Install extends ArboristWorkspaceCmd { const ignoreScripts = this.npm.config.get('ignore-scripts') const isGlobalInstall = this.npm.config.get('global') const where = isGlobalInstall ? globalTop : this.npm.prefix + const forced = this.npm.config.get('force') + const isDev = this.npm.config.get('dev') + const scriptShell = this.npm.config.get('script-shell') || undefined + + // be very strict about engines when trying to update npm itself + const npmInstall = args.find(arg => arg.startsWith('npm@') || arg === 'npm') + if (isGlobalInstall && npmInstall) { + const npmManifest = await pacote.manifest(npmInstall) + try { + checks.checkEngine(npmManifest, npmManifest.version, process.version) + } catch (e) { + if (forced) + this.npm.log.warn('install', `Forcing global npm install with incompatible version ${npmManifest.version} into node ${process.version}`) + else + throw e + } + } // don't try to install the prefix into itself args = args.filter(a => resolve(a) !== this.npm.prefix) @@ -135,7 +154,7 @@ class Install extends ArboristWorkspaceCmd { args = ['.'] // TODO: Add warnings for other deprecated flags? or remove this one? - if (this.npm.config.get('dev')) + if (isDev) log.warn('install', 'Usage of the `--dev` option is deprecated. 
Use `--include=dev` instead.') const opts = { @@ -150,7 +169,6 @@ class Install extends ArboristWorkspaceCmd { await arb.reify(opts) if (!args.length && !isGlobalInstall && !ignoreScripts) { - const scriptShell = this.npm.config.get('script-shell') || undefined const scripts = [ 'preinstall', 'install', diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js index da97195dd04f07..6e12bcb918eef8 100644 --- a/deps/npm/lib/utils/error-message.js +++ b/deps/npm/lib/utils/error-message.js @@ -181,7 +181,7 @@ module.exports = (er, npm) => { const pkg = er.pkgid.replace(/(?!^)@.*$/, '') detail.push(['404', '']) - detail.push(['404', '', "'" + er.pkgid + "' is not in the npm registry."]) + detail.push(['404', '', `'${replaceInfo(er.pkgid)}' is not in this registry.`]) const valResult = nameValidator(pkg) diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1 index 0039adcfbf739d..19a23676ded35f 100644 --- a/deps/npm/man/man1/npm-access.1 +++ b/deps/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM\-ACCESS" "1" "August 2021" "" "" +.TH "NPM\-ACCESS" "1" "September 2021" "" "" .SH "NAME" \fBnpm-access\fR \- Set access level on published packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1 index 59d9a5a4e43631..d413f5abcbdb10 100644 --- a/deps/npm/man/man1/npm-adduser.1 +++ b/deps/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM\-ADDUSER" "1" "August 2021" "" "" +.TH "NPM\-ADDUSER" "1" "September 2021" "" "" .SH "NAME" \fBnpm-adduser\fR \- Add a registry user account .SS Synopsis diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index 404061180176b8..f2a56e3e5e1cc7 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -1,4 +1,4 @@ -.TH "NPM\-AUDIT" "1" "August 2021" "" "" +.TH "NPM\-AUDIT" "1" "September 2021" "" "" .SH "NAME" \fBnpm-audit\fR \- Run a security audit .SS Synopsis diff --git a/deps/npm/man/man1/npm-bin.1 b/deps/npm/man/man1/npm-bin.1 index b8d47968145607..19b1ce3fea5dd5 100644 --- a/deps/npm/man/man1/npm-bin.1 +++ b/deps/npm/man/man1/npm-bin.1 @@ -1,4 +1,4 @@ -.TH "NPM\-BIN" "1" "August 2021" "" "" +.TH "NPM\-BIN" "1" "September 2021" "" "" .SH "NAME" \fBnpm-bin\fR \- Display npm bin folder .SS Synopsis diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1 index 101ff35e096ed4..5be9bba84b8679 100644 --- a/deps/npm/man/man1/npm-bugs.1 +++ b/deps/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM\-BUGS" "1" "August 2021" "" "" +.TH "NPM\-BUGS" "1" "September 2021" "" "" .SH "NAME" \fBnpm-bugs\fR \- Report bugs for a package in a web browser .SS Synopsis diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1 index fcb2981944777f..217d9e134bcd16 100644 --- a/deps/npm/man/man1/npm-cache.1 +++ b/deps/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM\-CACHE" "1" "August 2021" "" "" +.TH "NPM\-CACHE" "1" "September 2021" "" "" .SH "NAME" \fBnpm-cache\fR \- Manipulates packages cache .SS Synopsis diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index ccc462be57f530..87b62f0cf2a4f8 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -1,4 +1,4 @@ -.TH "NPM\-CI" "1" "August 2021" "" "" +.TH "NPM\-CI" "1" "September 2021" "" "" .SH "NAME" \fBnpm-ci\fR \- Install a project with a clean slate .SS Synopsis diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1 index 41fd2ae6037697..54a54e0a66e826 100644 --- 
a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM\-COMPLETION" "1" "August 2021" "" "" +.TH "NPM\-COMPLETION" "1" "September 2021" "" "" .SH "NAME" \fBnpm-completion\fR \- Tab Completion for npm .SS Synopsis diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index d7dda135292905..1189328e3b3845 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM\-CONFIG" "1" "August 2021" "" "" +.TH "NPM\-CONFIG" "1" "September 2021" "" "" .SH "NAME" \fBnpm-config\fR \- Manage the npm configuration files .SS Synopsis diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index e4344ec0a29d77..0a9b62f7f2cd3b 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DEDUPE" "1" "August 2021" "" "" +.TH "NPM\-DEDUPE" "1" "September 2021" "" "" .SH "NAME" \fBnpm-dedupe\fR \- Reduce duplication in the package tree .SS Synopsis diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index dbcfe389a2f249..83d2345b046399 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DEPRECATE" "1" "August 2021" "" "" +.TH "NPM\-DEPRECATE" "1" "September 2021" "" "" .SH "NAME" \fBnpm-deprecate\fR \- Deprecate a version of a package .SS Synopsis @@ -86,6 +86,6 @@ npm help registry .IP \(bu 2 npm help owner .IP \(bu 2 -npm help owner +npm help adduser .RE diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1 index ecf97b316265ac..afb363733953b5 100644 --- a/deps/npm/man/man1/npm-diff.1 +++ b/deps/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DIFF" "1" "August 2021" "" "" +.TH "NPM\-DIFF" "1" "September 2021" "" "" .SH "NAME" \fBnpm-diff\fR \- The registry diff command .SS Synopsis diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index edc4eace5ed0ca..5f019fa163235c 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DIST\-TAG" "1" "August 2021" "" "" +.TH "NPM\-DIST\-TAG" "1" "September 2021" "" "" .SH "NAME" \fBnpm-dist-tag\fR \- Modify package distribution tags .SS Synopsis diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index aa336e07883215..75a0fbc944104e 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DOCS" "1" "August 2021" "" "" +.TH "NPM\-DOCS" "1" "September 2021" "" "" .SH "NAME" \fBnpm-docs\fR \- Open documentation for a package in a web browser .SS Synopsis diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index bd27ec0b9996a3..ab3542bacd86b1 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM\-DOCTOR" "1" "August 2021" "" "" +.TH "NPM\-DOCTOR" "1" "September 2021" "" "" .SH "NAME" \fBnpm-doctor\fR \- Check your npm environment .SS Synopsis diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index be8b624c7d9315..c5bf4648197e01 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM\-EDIT" "1" "August 2021" "" "" +.TH "NPM\-EDIT" "1" "September 2021" "" "" .SH "NAME" \fBnpm-edit\fR \- Edit an installed package .SS Synopsis diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1 index 7543e2218061df..ebbc1b6e2ef97f 100644 --- a/deps/npm/man/man1/npm-exec.1 +++ 
b/deps/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM\-EXEC" "1" "August 2021" "" "" +.TH "NPM\-EXEC" "1" "September 2021" "" "" .SH "NAME" \fBnpm-exec\fR \- Run a command from a local or remote npm package .SS Synopsis diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1 index 90ec6eb458c771..67fc54527862b7 100644 --- a/deps/npm/man/man1/npm-explain.1 +++ b/deps/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM\-EXPLAIN" "1" "August 2021" "" "" +.TH "NPM\-EXPLAIN" "1" "September 2021" "" "" .SH "NAME" \fBnpm-explain\fR \- Explain installed packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index 0f7ef716ffd42e..3834eb6ac928c8 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM\-EXPLORE" "1" "August 2021" "" "" +.TH "NPM\-EXPLORE" "1" "September 2021" "" "" .SH "NAME" \fBnpm-explore\fR \- Browse an installed package .SS Synopsis diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index 82bc51c97d8d2c..35314bfe8f2e90 100644 --- a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM\-FIND\-DUPES" "1" "August 2021" "" "" +.TH "NPM\-FIND\-DUPES" "1" "September 2021" "" "" .SH "NAME" \fBnpm-find-dupes\fR \- Find duplication in the package tree .SS Synopsis diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1 index 8beb4cca2677c8..ef42eeb69d121e 100644 --- a/deps/npm/man/man1/npm-fund.1 +++ b/deps/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM\-FUND" "1" "August 2021" "" "" +.TH "NPM\-FUND" "1" "September 2021" "" "" .SH "NAME" \fBnpm-fund\fR \- Retrieve funding information .SS Synopsis diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index 5d7bd8fd58e731..52704c9f8b8e14 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM\-HELP\-SEARCH" "1" "August 2021" "" "" +.TH "NPM\-HELP\-SEARCH" "1" "September 2021" "" "" .SH "NAME" \fBnpm-help-search\fR \- Search npm help documentation .SS Synopsis diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index 6d7fafc4b819b0..0f5d3612e0a8ec 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM\-HELP" "1" "August 2021" "" "" +.TH "NPM\-HELP" "1" "September 2021" "" "" .SH "NAME" \fBnpm-help\fR \- Get help on npm .SS Synopsis diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1 index 6135a720b51f05..f36a43dfc471dd 100644 --- a/deps/npm/man/man1/npm-hook.1 +++ b/deps/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM\-HOOK" "1" "August 2021" "" "" +.TH "NPM\-HOOK" "1" "September 2021" "" "" .SH "NAME" \fBnpm-hook\fR \- Manage registry hooks .SS Synopsis diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 010f06a88332f1..be2d7a0439be86 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,4 +1,4 @@ -.TH "NPM\-INIT" "1" "August 2021" "" "" +.TH "NPM\-INIT" "1" "September 2021" "" "" .SH "NAME" \fBnpm-init\fR \- Create a package\.json file .SS Synopsis diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1 index 922ea545d1f12e..4c51f47da62f0b 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM\-INSTALL\-CI\-TEST" "1" "August 2021" "" "" +.TH "NPM\-INSTALL\-CI\-TEST" "1" 
"September 2021" "" "" .SH "NAME" \fBnpm-install-ci-test\fR \- Install a project with a clean slate and run tests .SS Synopsis diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index b1e4a43ef84464..ee788cf70a72e2 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM\-INSTALL\-TEST" "1" "August 2021" "" "" +.TH "NPM\-INSTALL\-TEST" "1" "September 2021" "" "" .SH "NAME" \fBnpm-install-test\fR \- Install package(s) and run tests .SS Synopsis diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 3ebcf9c04ba1a5..420aed1b082c4b 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM\-INSTALL" "1" "August 2021" "" "" +.TH "NPM\-INSTALL" "1" "September 2021" "" "" .SH "NAME" \fBnpm-install\fR \- Install a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index 2c6d3a86e6a615..81cfd87afdc1b9 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM\-LINK" "1" "August 2021" "" "" +.TH "NPM\-LINK" "1" "September 2021" "" "" .SH "NAME" \fBnpm-link\fR \- Symlink a package folder .SS Synopsis diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index 05c1e8c94c9cfb..945e1aa45a23dc 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM\-LOGOUT" "1" "August 2021" "" "" +.TH "NPM\-LOGOUT" "1" "September 2021" "" "" .SH "NAME" \fBnpm-logout\fR \- Log out of the registry .SS Synopsis diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 00d30a51c933cd..8ff55b216f2a4b 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM\-LS" "1" "August 2021" "" "" +.TH "NPM\-LS" "1" "September 2021" "" "" .SH "NAME" \fBnpm-ls\fR \- List installed packages .SS Synopsis @@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf -npm@7\.21\.1 /path/to/npm +npm@7\.23\.0 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1 index 4ad678e630c38d..9eb657a80f2de5 100644 --- a/deps/npm/man/man1/npm-org.1 +++ b/deps/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM\-ORG" "1" "August 2021" "" "" +.TH "NPM\-ORG" "1" "September 2021" "" "" .SH "NAME" \fBnpm-org\fR \- Manage orgs .SS Synopsis diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index 20a06266e250a3..b760bd3af1e8cb 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM\-OUTDATED" "1" "August 2021" "" "" +.TH "NPM\-OUTDATED" "1" "September 2021" "" "" .SH "NAME" \fBnpm-outdated\fR \- Check for outdated packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index 12ec156e5da657..a73cb2c1872b8f 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM\-OWNER" "1" "August 2021" "" "" +.TH "NPM\-OWNER" "1" "September 2021" "" "" .SH "NAME" \fBnpm-owner\fR \- Manage package owners .SS Synopsis diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index a51ed066536a64..da097d402d0cd3 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PACK" "1" "August 2021" "" "" +.TH "NPM\-PACK" "1" "September 2021" 
"" "" .SH "NAME" \fBnpm-pack\fR \- Create a tarball from a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index b56a4aa16f1670..5f109a6da3a511 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PING" "1" "August 2021" "" "" +.TH "NPM\-PING" "1" "September 2021" "" "" .SH "NAME" \fBnpm-ping\fR \- Ping npm registry .SS Synopsis diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1 index 0eec1ed55e8473..ba88b03e654bb8 100644 --- a/deps/npm/man/man1/npm-pkg.1 +++ b/deps/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PKG" "1" "August 2021" "" "" +.TH "NPM\-PKG" "1" "September 2021" "" "" .SH "NAME" \fBnpm-pkg\fR \- Manages your package\.json .SS Synopsis diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index cf0d14ecb9a229..381b349399262e 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PREFIX" "1" "August 2021" "" "" +.TH "NPM\-PREFIX" "1" "September 2021" "" "" .SH "NAME" \fBnpm-prefix\fR \- Display prefix .SS Synopsis diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1 index e3a909f8440e1c..310d90026a4cec 100644 --- a/deps/npm/man/man1/npm-profile.1 +++ b/deps/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PROFILE" "1" "August 2021" "" "" +.TH "NPM\-PROFILE" "1" "September 2021" "" "" .SH "NAME" \fBnpm-profile\fR \- Change settings on your registry profile .SS Synopsis diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index 762a6dd87191d7..0602a7e874ada4 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PRUNE" "1" "August 2021" "" "" +.TH "NPM\-PRUNE" "1" "September 2021" "" "" .SH "NAME" \fBnpm-prune\fR \- Remove extraneous packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index 34c8f806f64859..fe3f33450ab252 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM\-PUBLISH" "1" "August 2021" "" "" +.TH "NPM\-PUBLISH" "1" "September 2021" "" "" .SH "NAME" \fBnpm-publish\fR \- Publish a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index 595925449bb24b..05a537c69a16c9 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM\-REBUILD" "1" "August 2021" "" "" +.TH "NPM\-REBUILD" "1" "September 2021" "" "" .SH "NAME" \fBnpm-rebuild\fR \- Rebuild a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index 6026a1b2740386..603afd8d3fe4b4 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM\-REPO" "1" "August 2021" "" "" +.TH "NPM\-REPO" "1" "September 2021" "" "" .SH "NAME" \fBnpm-repo\fR \- Open package repository page in the browser .SS Synopsis diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index 5cd3cadc44758e..0192591dea0749 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM\-RESTART" "1" "August 2021" "" "" +.TH "NPM\-RESTART" "1" "September 2021" "" "" .SH "NAME" \fBnpm-restart\fR \- Restart a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index 4d04ccab31dba7..a7c41b63102b4b 100644 --- a/deps/npm/man/man1/npm-root.1 +++ 
b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM\-ROOT" "1" "August 2021" "" "" +.TH "NPM\-ROOT" "1" "September 2021" "" "" .SH "NAME" \fBnpm-root\fR \- Display npm root .SS Synopsis diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index 02bb61417f3a6e..649a2b1fc87d18 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM\-RUN\-SCRIPT" "1" "August 2021" "" "" +.TH "NPM\-RUN\-SCRIPT" "1" "September 2021" "" "" .SH "NAME" \fBnpm-run-script\fR \- Run arbitrary package scripts .SS Synopsis diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index c1d2a75e6a53ac..e25f0a3cd4f0bf 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM\-SEARCH" "1" "August 2021" "" "" +.TH "NPM\-SEARCH" "1" "September 2021" "" "" .SH "NAME" \fBnpm-search\fR \- Search for packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-set-script.1 b/deps/npm/man/man1/npm-set-script.1 index 50d42f6e24a91f..0521583730493e 100644 --- a/deps/npm/man/man1/npm-set-script.1 +++ b/deps/npm/man/man1/npm-set-script.1 @@ -1,4 +1,4 @@ -.TH "NPM\-SET\-SCRIPT" "1" "August 2021" "" "" +.TH "NPM\-SET\-SCRIPT" "1" "September 2021" "" "" .SH "NAME" \fBnpm-set-script\fR \- Set tasks in the scripts section of package\.json .SS Synopsis diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index 4cf505d65ddedd..b0eff04992b7f8 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM\-SHRINKWRAP" "1" "August 2021" "" "" +.TH "NPM\-SHRINKWRAP" "1" "September 2021" "" "" .SH "NAME" \fBnpm-shrinkwrap\fR \- Lock down dependency versions for publication .SS Synopsis diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index 4da5668414b355..f19795184de133 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM\-STAR" "1" "August 2021" "" "" +.TH "NPM\-STAR" "1" "September 2021" "" "" .SH "NAME" \fBnpm-star\fR \- Mark your favorite packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index b45cde640c21a5..71de65a2f58bba 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM\-STARS" "1" "August 2021" "" "" +.TH "NPM\-STARS" "1" "September 2021" "" "" .SH "NAME" \fBnpm-stars\fR \- View packages marked as favorites .SS Synopsis diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index 05a81c03344e51..24af98df83816f 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM\-START" "1" "August 2021" "" "" +.TH "NPM\-START" "1" "September 2021" "" "" .SH "NAME" \fBnpm-start\fR \- Start a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 356585874f7d69..a6f8b977ea5c7e 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM\-STOP" "1" "August 2021" "" "" +.TH "NPM\-STOP" "1" "September 2021" "" "" .SH "NAME" \fBnpm-stop\fR \- Stop a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index fc96e8aa488154..ffd620a450ab2d 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM\-TEAM" "1" "August 2021" "" "" +.TH "NPM\-TEAM" "1" "September 2021" "" "" .SH "NAME" \fBnpm-team\fR \- Manage 
organization teams and team memberships .SS Synopsis diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index 6eb0888077e921..2280aacf2a4779 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM\-TEST" "1" "August 2021" "" "" +.TH "NPM\-TEST" "1" "September 2021" "" "" .SH "NAME" \fBnpm-test\fR \- Test a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index aa0daab47e5a28..119f4279968c78 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM\-TOKEN" "1" "August 2021" "" "" +.TH "NPM\-TOKEN" "1" "September 2021" "" "" .SH "NAME" \fBnpm-token\fR \- Manage your authentication tokens .SS Synopsis diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index 4282fe1282288a..85c251ef595823 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM\-UNINSTALL" "1" "August 2021" "" "" +.TH "NPM\-UNINSTALL" "1" "September 2021" "" "" .SH "NAME" \fBnpm-uninstall\fR \- Remove a package .SS Synopsis diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index 81d1e39733d305..682659915c6d8f 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM\-UNPUBLISH" "1" "August 2021" "" "" +.TH "NPM\-UNPUBLISH" "1" "September 2021" "" "" .SH "NAME" \fBnpm-unpublish\fR \- Remove a package from the registry .SS Synopsis diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1 index 4702cca1a9a2db..d69f398283aa7b 100644 --- a/deps/npm/man/man1/npm-unstar.1 +++ b/deps/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM\-UNSTAR" "1" "August 2021" "" "" +.TH "NPM\-UNSTAR" "1" "September 2021" "" "" .SH "NAME" \fBnpm-unstar\fR \- Remove an item from your favorite packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index ed8633bd0513dd..5e871f7d57334e 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM\-UPDATE" "1" "August 2021" "" "" +.TH "NPM\-UPDATE" "1" "September 2021" "" "" .SH "NAME" \fBnpm-update\fR \- Update packages .SS Synopsis diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index cdf50de20aaf8b..c91b7094ed9073 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM\-VERSION" "1" "August 2021" "" "" +.TH "NPM\-VERSION" "1" "September 2021" "" "" .SH "NAME" \fBnpm-version\fR \- Bump a package version .SS Synopsis diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index a0bab36f219080..becada49734c69 100644 --- a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM\-VIEW" "1" "August 2021" "" "" +.TH "NPM\-VIEW" "1" "September 2021" "" "" .SH "NAME" \fBnpm-view\fR \- View registry info .SS Synopsis diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index f5dd6718fb2ec5..d7f53fb3257eb2 100644 --- a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM\-WHOAMI" "1" "August 2021" "" "" +.TH "NPM\-WHOAMI" "1" "September 2021" "" "" .SH "NAME" \fBnpm-whoami\fR \- Display npm username .SS Synopsis diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index e915deec084429..e9e11652387d3e 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 
@@ -1,4 +1,4 @@ -.TH "NPM" "1" "August 2021" "" "" +.TH "NPM" "1" "September 2021" "" "" .SH "NAME" \fBnpm\fR \- javascript package manager .SS Synopsis @@ -10,7 +10,7 @@ npm [args] .RE .SS Version .P -7\.21\.1 +7\.23\.0 .SS Description .P npm is the package manager for the Node JavaScript platform\. It puts diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1 index 982127c8ad6dd5..8eedcc86d5dab9 100644 --- a/deps/npm/man/man1/npx.1 +++ b/deps/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "August 2021" "" "" +.TH "NPX" "1" "September 2021" "" "" .SH "NAME" \fBnpx\fR \- Run a command from a local or remote npm package .SS Synopsis diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5 index de49122f3cec91..d5b0ee7fc129a8 100644 --- a/deps/npm/man/man5/folders.5 +++ b/deps/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "August 2021" "" "" +.TH "FOLDERS" "5" "September 2021" "" "" .SH "NAME" \fBfolders\fR \- Folder Structures Used by npm .SS Description diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index 0759e770bea6a2..43bd168c9e1e63 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "August 2021" "" "" +.TH "INSTALL" "5" "September 2021" "" "" .SH "NAME" \fBinstall\fR \- Download and install node and npm .SS Description diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 index 2e3aa79adb1d60..dc2ae457691af5 100644 --- a/deps/npm/man/man5/npm-shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM\-SHRINKWRAP\.JSON" "5" "August 2021" "" "" +.TH "NPM\-SHRINKWRAP\.JSON" "5" "September 2021" "" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR \- A publishable lockfile .SS Description diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index 84a0ffadbee25a..aa3850d7779fbb 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "August 2021" "" "" +.TH "NPMRC" "5" "September 2021" "" "" .SH "NAME" \fBnpmrc\fR \- The npm config files .SS Description diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index 7d4574186e73f9..7b69312b12416d 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE\.JSON" "5" "August 2021" "" "" +.TH "PACKAGE\.JSON" "5" "September 2021" "" "" .SH "NAME" \fBpackage.json\fR \- Specifics of npm's package\.json handling .SS Description diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 18f968ee6d8d1f..0be5a0548e805c 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE\-LOCK\.JSON" "5" "August 2021" "" "" +.TH "PACKAGE\-LOCK\.JSON" "5" "September 2021" "" "" .SH "NAME" \fBpackage-lock.json\fR \- A manifestation of the manifest .SS Description diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index db69e6bb1fd8ac..c794baedcc11c2 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "August 2021" "" "" +.TH "CONFIG" "7" "September 2021" "" "" .SH "NAME" \fBconfig\fR \- More than you probably want to know about npm configuration .SS Description diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7 index 9c226dfc3715a4..07c5f0d142696e 100644 --- a/deps/npm/man/man7/developers.7 +++ b/deps/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ 
-.TH "DEVELOPERS" "7" "August 2021" "" "" +.TH "DEVELOPERS" "7" "September 2021" "" "" .SH "NAME" \fBdevelopers\fR \- Developer Guide .SS Description diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7 index 983a41c9c2daeb..74386a0cab74fd 100644 --- a/deps/npm/man/man7/orgs.7 +++ b/deps/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "August 2021" "" "" +.TH "ORGS" "7" "September 2021" "" "" .SH "NAME" \fBorgs\fR \- Working with Teams & Orgs .SS Description diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7 index 07f090af82e869..90fe7a99f9208e 100644 --- a/deps/npm/man/man7/registry.7 +++ b/deps/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "August 2021" "" "" +.TH "REGISTRY" "7" "September 2021" "" "" .SH "NAME" \fBregistry\fR \- The JavaScript Package Registry .SS Description diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7 index 7e8a85039b8437..bc591096ae0b7c 100644 --- a/deps/npm/man/man7/removal.7 +++ b/deps/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "August 2021" "" "" +.TH "REMOVAL" "7" "September 2021" "" "" .SH "NAME" \fBremoval\fR \- Cleaning the Slate .SS Synopsis diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7 index 6cf9946d01aa44..f35e7b382311c5 100644 --- a/deps/npm/man/man7/scope.7 +++ b/deps/npm/man/man7/scope.7 @@ -1,4 +1,4 @@ -.TH "SCOPE" "7" "August 2021" "" "" +.TH "SCOPE" "7" "September 2021" "" "" .SH "NAME" \fBscope\fR \- Scoped packages .SS Description diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7 index 47e5879639b814..4e688ccc849bd8 100644 --- a/deps/npm/man/man7/scripts.7 +++ b/deps/npm/man/man7/scripts.7 @@ -1,4 +1,4 @@ -.TH "SCRIPTS" "7" "August 2021" "" "" +.TH "SCRIPTS" "7" "September 2021" "" "" .SH "NAME" \fBscripts\fR \- How npm handles the "scripts" field .SS Description diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7 index 0e6e4c410cc3af..d84c9f59726b15 100644 --- a/deps/npm/man/man7/workspaces.7 +++ b/deps/npm/man/man7/workspaces.7 @@ -1,4 +1,4 @@ -.TH "WORKSPACES" "7" "August 2021" "" "" +.TH "WORKSPACES" "7" "September 2021" "" "" .SH "NAME" \fBworkspaces\fR \- Working with workspaces .SS Description @@ -189,6 +189,17 @@ npm run test \-\-workspaces .RE .P Will run the \fBtest\fP script in both \fB\|\./packages/a\fP and \fB\|\./packages/b\fP\|\. +.SS Ignoring missing scripts +.P +It is not required for all of the workspaces to implement scripts run with the \fBnpm run\fP command\. +.P +By running the command with the \fB\-\-if\-present\fP flag, npm will ignore workspaces missing target script\. 
+.P +.RS 2 +.nf +npm run test \-\-workspaces \-\-if\-present +.fi +.RE .SS See also .RS 0 .IP \(bu 2 diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/actual.js b/deps/npm/node_modules/@npmcli/arborist/bin/actual.js index ef254e1d4133d0..eb0495997a1b97 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/actual.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/actual.js @@ -7,12 +7,14 @@ require('./lib/timers.js') const start = process.hrtime() new Arborist(options).loadActual(options).then(tree => { const end = process.hrtime(start) - if (!process.argv.includes('--quiet')) + if (!process.argv.includes('--quiet')) { print(tree) + } console.error(`read ${tree.inventory.size} deps in ${end[0] * 1000 + end[1] / 1e6}ms`) - if (options.save) + if (options.save) { tree.meta.save() + } if (options.saveHidden) { tree.meta.hiddenLockfile = true tree.meta.filename = options.path + '/node_modules/.package-lock.json' diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/audit.js b/deps/npm/node_modules/@npmcli/arborist/bin/audit.js index 5075724e2d471e..d9ac532d3ed704 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/audit.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/audit.js @@ -7,12 +7,14 @@ require('./lib/logging.js') const Vuln = require('../lib/vuln.js') const printReport = report => { - for (const vuln of report.values()) + for (const vuln of report.values()) { console.log(printVuln(vuln)) + } if (report.topVulns.size) { console.log('\n# top-level vulnerabilities') - for (const vuln of report.topVulns.values()) + for (const vuln of report.topVulns.values()) { console.log(printVuln(vuln)) + } } } @@ -37,12 +39,16 @@ const arb = new Arborist(options) arb.audit(options).then(tree => { process.emit('timeEnd', 'audit script') const end = process.hrtime(start) - if (options.fix) + if (options.fix) { print(tree) - if (!options.quiet) + } + if (!options.quiet) { printReport(arb.auditReport) - if (options.fix) + } + if (options.fix) { console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) + } + if (tree.meta && options.save) { tree.meta.save() + } }).catch(er => console.error(er)) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/dedupe.js b/deps/npm/node_modules/@npmcli/arborist/bin/dedupe.js index 96f754e34ca9ee..b0e83459ef73fb 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/dedupe.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/dedupe.js @@ -10,8 +10,9 @@ const printDiff = diff => { depth({ tree: diff, visit: d => { - if (d.location === '') + if (d.location === '') { return + } switch (d.action) { case 'REMOVE': console.error('REMOVE', d.actual.location) @@ -38,9 +39,11 @@ arb.dedupe(options).then(tree => { process.emit('timeEnd', 'install') const end = process.hrtime(start) print(tree) - if (options.dryRun) + if (options.dryRun) { printDiff(arb.diff) + } console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) + if (tree.meta && options.save) { tree.meta.save() + } }).catch(er => console.error(require('util').inspect(er, { depth: Infinity }))) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/funding.js b/deps/npm/node_modules/@npmcli/arborist/bin/funding.js index fa1237e87e98a0..d0f4f31654ae0e 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/funding.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/funding.js @@ -19,13 +19,15 @@ a.loadVirtual().then(tree => { const end = process.hrtime(start) if (!query) { for (const node 
of tree.inventory.values()) { - if (node.package.funding) + if (node.package.funding) { console.log(node.name, node.location, node.package.funding) + } } } else { for (const node of tree.inventory.query('name', query)) { - if (node.package.funding) + if (node.package.funding) { console.log(node.name, node.location, node.package.funding) + } } } console.error(`read ${tree.inventory.size} deps in ${end[0] * 1000 + end[1] / 1e6}ms`) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/ideal.js b/deps/npm/node_modules/@npmcli/arborist/bin/ideal.js index 74d79ce0a51e96..5d1ed0dcd9dc62 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/ideal.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/ideal.js @@ -11,8 +11,9 @@ new Arborist(options).buildIdealTree(options).then(tree => { const end = process.hrtime(start) print(tree) console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 10e9}s`) - if (tree.meta && options.save) + if (tree.meta && options.save) { tree.meta.save() + } }).catch(er => { const opt = { depth: Infinity, color: true } console.error(er.code === 'ERESOLVE' ? inspect(er, opt) : er) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/lib/logging.js b/deps/npm/node_modules/@npmcli/arborist/bin/lib/logging.js index 9420bca3c320cd..8183ece1fd1191 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/lib/logging.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/lib/logging.js @@ -24,12 +24,13 @@ const colors = process.stderr.isTTY const magenta = colors ? msg => `\x1B[35m${msg}\x1B[39m` : m => m if (loglevel !== 'silent') { process.on('log', (level, ...args) => { - if (levelMap.get(level) < levelMap.get(loglevel)) + if (levelMap.get(level) < levelMap.get(loglevel)) { return + } const pref = `${process.pid} ${magenta(level)} ` - if (level === 'warn' && args[0] === 'ERESOLVE') + if (level === 'warn' && args[0] === 'ERESOLVE') { args[2] = inspect(args[2], { depth: 10, colors }) - else { + } else { args = args.map(a => { return typeof a === 'string' ? 
a : inspect(a, { depth: 10, colors }) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/lib/options.js b/deps/npm/node_modules/@npmcli/arborist/bin/lib/options.js index a1b6719627f506..23e89ddce698bf 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/lib/options.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/lib/options.js @@ -11,17 +11,17 @@ for (const arg of process.argv.slice(2)) { } else if (/^--rm=/.test(arg)) { options.rm = options.rm || [] options.rm.push(arg.substr('--rm='.length)) - } else if (arg === '--global') + } else if (arg === '--global') { options.global = true - else if (arg === '--global-style') + } else if (arg === '--global-style') { options.globalStyle = true - else if (arg === '--prefer-dedupe') + } else if (arg === '--prefer-dedupe') { options.preferDedupe = true - else if (arg === '--legacy-peer-deps') + } else if (arg === '--legacy-peer-deps') { options.legacyPeerDeps = true - else if (arg === '--force') + } else if (arg === '--force') { options.force = true - else if (arg === '--update-all') { + } else if (arg === '--update-all') { options.update = options.update || {} options.update.all = true } else if (/^--update=/.test(arg)) { @@ -31,9 +31,9 @@ for (const arg of process.argv.slice(2)) { } else if (/^--omit=/.test(arg)) { options.omit = options.omit || [] options.omit.push(arg.substr('--omit='.length)) - } else if (/^--before=/.test(arg)) + } else if (/^--before=/.test(arg)) { options.before = new Date(arg.substr('--before='.length)) - else if (/^-w.+/.test(arg)) { + } else if (/^-w.+/.test(arg)) { options.workspaces = options.workspaces || [] options.workspaces.push(arg.replace(/^-w/, '')) } else if (/^--workspace=/.test(arg)) { @@ -43,15 +43,17 @@ for (const arg of process.argv.slice(2)) { const [key, ...v] = arg.replace(/^--/, '').split('=') const val = v.join('=') options[key] = val === 'false' ? false : val === 'true' ? true : val - } else if (/^--.+/.test(arg)) + } else if (/^--.+/.test(arg)) { options[arg.replace(/^--/, '')] = true - else if (options.path === undefined) + } else if (options.path === undefined) { options.path = arg - else + } else { options._.push(arg) + } } -if (options.path === undefined) +if (options.path === undefined) { options.path = '.' +} console.error(options) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/lib/timers.js b/deps/npm/node_modules/@npmcli/arborist/bin/lib/timers.js index b516af92c5b57f..242431980e55c4 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/lib/timers.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/lib/timers.js @@ -3,21 +3,24 @@ const { format } = require('util') const options = require('./options.js') process.on('time', name => { - if (timers[name]) + if (timers[name]) { throw new Error('conflicting timer! ' + name) + } timers[name] = process.hrtime() }) const dim = process.stderr.isTTY ? msg => `\x1B[2m${msg}\x1B[22m` : m => m const red = process.stderr.isTTY ? msg => `\x1B[31m${msg}\x1B[39m` : m => m process.on('timeEnd', name => { - if (!timers[name]) + if (!timers[name]) { throw new Error('timer not started! 
' + name) + } const res = process.hrtime(timers[name]) delete timers[name] const msg = format(`${process.pid} ${name}`, res[0] * 1e3 + res[1] / 1e6) - if (options.timers !== false) + if (options.timers !== false) { console.error(dim(msg)) + } }) process.on('exit', () => { diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/license.js b/deps/npm/node_modules/@npmcli/arborist/bin/license.js index 89d0d879036b0c..7fc08dd83eb5b1 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/license.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/license.js @@ -8,27 +8,31 @@ const query = options._.shift() a.loadVirtual().then(tree => { // only load the actual tree if the virtual one doesn't have modern metadata - if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) + if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) { throw 'load actual' - else + } else { return tree + } }).catch((er) => { console.error('loading actual tree', er) return a.loadActual() }).then(tree => { if (!query) { const set = [] - for (const license of tree.inventory.query('license')) + for (const license of tree.inventory.query('license')) { set.push([tree.inventory.query('license', license).size, license]) + } for (const [count, license] of set.sort((a, b) => a[1] && b[1] ? b[0] - a[0] || a[1].localeCompare(b[1], 'en') : a[1] ? -1 : b[1] ? 1 - : 0)) + : 0)) { console.log(count, license) + } } else { - for (const node of tree.inventory.query('license', query === 'undefined' ? undefined : query)) + for (const node of tree.inventory.query('license', query === 'undefined' ? undefined : query)) { console.log(`${node.name} ${node.location} ${node.package.description || ''}`) + } } }) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/prune.js b/deps/npm/node_modules/@npmcli/arborist/bin/prune.js index 357dbcaafa03f8..4809a992388aa5 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/prune.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/prune.js @@ -10,8 +10,9 @@ const printDiff = diff => { depth({ tree: diff, visit: d => { - if (d.location === '') + if (d.location === '') { return + } switch (d.action) { case 'REMOVE': console.error('REMOVE', d.actual.location) @@ -38,9 +39,11 @@ arb.prune(options).then(tree => { process.emit('timeEnd', 'install') const end = process.hrtime(start) print(tree) - if (options.dryRun) + if (options.dryRun) { printDiff(arb.diff) + } console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) + if (tree.meta && options.save) { tree.meta.save() + } }).catch(er => console.error(require('util').inspect(er, { depth: Infinity }))) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/reify.js b/deps/npm/node_modules/@npmcli/arborist/bin/reify.js index d17a0e03b3286a..803bac978c69aa 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/reify.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/reify.js @@ -10,8 +10,9 @@ const printDiff = diff => { depth({ tree: diff, visit: d => { - if (d.location === '') + if (d.location === '') { return + } switch (d.action) { case 'REMOVE': console.error('REMOVE', d.actual.location) @@ -38,9 +39,11 @@ arb.reify(options).then(tree => { process.emit('timeEnd', 'install') const end = process.hrtime(start) print(tree) - if (options.dryRun) + if (options.dryRun) { printDiff(arb.diff) + } console.error(`resolved ${tree.inventory.size} deps in ${end[0] + end[1] / 1e9}s`) - if (tree.meta && options.save) + if (tree.meta && options.save) { tree.meta.save() + } }).catch(er => 
console.error(require('util').inspect(er, { depth: Infinity }))) diff --git a/deps/npm/node_modules/@npmcli/arborist/bin/virtual.js b/deps/npm/node_modules/@npmcli/arborist/bin/virtual.js index 3352802c2de873..457c945e72c211 100644 --- a/deps/npm/node_modules/@npmcli/arborist/bin/virtual.js +++ b/deps/npm/node_modules/@npmcli/arborist/bin/virtual.js @@ -8,9 +8,11 @@ require('./lib/timers.js') const start = process.hrtime() new Arborist(options).loadVirtual().then(tree => { const end = process.hrtime(start) - if (!options.quiet) + if (!options.quiet) { print(tree) - if (options.save) + } + if (options.save) { tree.meta.save() + } console.error(`read ${tree.inventory.size} deps in ${end[0] * 1000 + end[1] / 1e6}ms`) }).catch(er => console.error(er)) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js b/deps/npm/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js index f78a43319be8cb..c1b64a461af8ac 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js @@ -1,8 +1,9 @@ // add and remove dependency specs to/from pkg manifest const add = ({pkg, add, saveBundle, saveType, log}) => { - for (const spec of add) + for (const spec of add) { addSingle({pkg, spec, saveBundle, saveType, log}) + } return pkg } @@ -24,8 +25,9 @@ const addSingle = ({pkg, spec, saveBundle, saveType, log}) => { // to keep based on the same order of priority we do when // building the tree as defined in the _loadDeps method of // the node class. - if (!saveType) + if (!saveType) { saveType = inferSaveType(pkg, spec.name) + } if (saveType === 'prod') { // a production dependency can only exist as production (rpj ensures it @@ -48,8 +50,9 @@ const addSingle = ({pkg, spec, saveBundle, saveType, log}) => { const depType = saveTypeMap.get(saveType) pkg[depType] = pkg[depType] || {} - if (rawSpec !== '' || pkg[depType][name] === undefined) + if (rawSpec !== '' || pkg[depType][name] === undefined) { pkg[depType][name] = rawSpec || '*' + } if (saveType === 'optional') { // Affordance for previous npm versions that require this behaviour pkg.dependencies = pkg.dependencies || {} @@ -58,17 +61,18 @@ const addSingle = ({pkg, spec, saveBundle, saveType, log}) => { if (saveType === 'peer' || saveType === 'peerOptional') { const pdm = pkg.peerDependenciesMeta || {} - if (saveType === 'peer' && pdm[name] && pdm[name].optional) + if (saveType === 'peer' && pdm[name] && pdm[name].optional) { pdm[name].optional = false - else if (saveType === 'peerOptional') { + } else if (saveType === 'peerOptional') { pdm[name] = pdm[name] || {} pdm[name].optional = true pkg.peerDependenciesMeta = pdm } // peerDeps are often also a devDep, so that they can be tested when // using package managers that don't auto-install peer deps - if (pkg.devDependencies && pkg.devDependencies[name] !== undefined) + if (pkg.devDependencies && pkg.devDependencies[name] !== undefined) { pkg.devDependencies[name] = pkg.peerDependencies[name] + } } if (saveBundle && saveType !== 'peer' && saveType !== 'peerOptional') { @@ -87,47 +91,54 @@ const inferSaveType = (pkg, name) => { saveType === 'peerOptional' && (!hasSubKey(pkg, 'peerDependenciesMeta', name) || !pkg.peerDependenciesMeta[name].optional) - ) + ) { return 'peer' + } return saveType } } return 'prod' } +const { hasOwnProperty } = Object.prototype const hasSubKey = (pkg, depType, name) => { - return pkg[depType] && Object.prototype.hasOwnProperty.call(pkg[depType], name) + return pkg[depType] && 
hasOwnProperty.call(pkg[depType], name) } // Removes a subkey and warns about it if it's being replaced const deleteSubKey = (pkg, depType, name, replacedBy, log) => { if (hasSubKey(pkg, depType, name)) { - if (replacedBy && log) + if (replacedBy && log) { log.warn('idealTree', `Removing ${depType}.${name} in favor of ${replacedBy}.${name}`) + } delete pkg[depType][name] - // clean up peerDependenciesMeta if we are removing something from peerDependencies + // clean up peerDepsMeta if we are removing something from peerDependencies if (depType === 'peerDependencies' && pkg.peerDependenciesMeta) { delete pkg.peerDependenciesMeta[name] - if (!Object.keys(pkg.peerDependenciesMeta).length) + if (!Object.keys(pkg.peerDependenciesMeta).length) { delete pkg.peerDependenciesMeta + } } - if (!Object.keys(pkg[depType]).length) + if (!Object.keys(pkg[depType]).length) { delete pkg[depType] + } } } const rm = (pkg, rm) => { for (const depType of new Set(saveTypeMap.values())) { - for (const name of rm) + for (const name of rm) { deleteSubKey(pkg, depType, name) + } } if (pkg.bundleDependencies) { pkg.bundleDependencies = pkg.bundleDependencies .filter(name => !rm.includes(name)) - if (!pkg.bundleDependencies.length) + if (!pkg.bundleDependencies.length) { delete pkg.bundleDependencies + } } return pkg } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/audit.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/audit.js index bf1c335e753639..c0cd79bb13e360 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/audit.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/audit.js @@ -22,8 +22,9 @@ module.exports = cls => class Auditor extends cls { process.emit('time', 'audit') const tree = await this.loadVirtual() - if (this[_workspaces] && this[_workspaces].length) + if (this[_workspaces] && this[_workspaces].length) { options.filterSet = this.workspaceDependencySet(tree, this[_workspaces]) + } this.auditReport = await AuditReport.load(tree, options) const ret = options.fix ? this.reify(options) : this.auditReport process.emit('timeEnd', 'audit') diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index cda7f8acfb5175..c45024d16e86b7 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -137,8 +137,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { this[_globalStyle] = this[_global] || globalStyle this[_follow] = !!follow - if (this[_workspaces].length && this[_global]) + if (this[_workspaces].length && this[_global]) { throw new Error('Cannot operate on workspaces in global mode') + } this[_explicitRequests] = new Set() this[_preferDedupe] = false @@ -168,18 +169,21 @@ module.exports = cls => class IdealTreeBuilder extends cls { // public method async buildIdealTree (options = {}) { - if (this.idealTree) + if (this.idealTree) { return Promise.resolve(this.idealTree) + } // allow the user to set reify options on the ctor as well. // XXX: deprecate separate reify() options object. 
options = { ...this.options, ...options } // an empty array or any falsey value is the same as null - if (!options.add || options.add.length === 0) + if (!options.add || options.add.length === 0) { options.add = null - if (!options.rm || options.rm.length === 0) + } + if (!options.rm || options.rm.length === 0) { options.rm = null + } process.emit('time', 'idealTree') @@ -230,11 +234,12 @@ module.exports = cls => class IdealTreeBuilder extends cls { [_checkEngine] (node) { const { engineStrict, npmVersion, nodeVersion } = this.options - const c = () => checkEngine(node.package, npmVersion, nodeVersion, this[_force]) + const c = () => + checkEngine(node.package, npmVersion, nodeVersion, this[_force]) - if (engineStrict) + if (engineStrict) { c() - else { + } else { try { c() } catch (er) { @@ -252,8 +257,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { : Array.isArray(options.update) ? { names: options.update } : options.update || {} - if (update.all || !Array.isArray(update.names)) + if (update.all || !Array.isArray(update.names)) { update.names = [] + } this[_complete] = !!options.complete this[_preferDedupe] = !!options.preferDedupe @@ -283,8 +289,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { : rpj(this.path + '/package.json').then( pkg => this[_rootNodeFromPackage](pkg), er => { - if (er.code === 'EJSONPARSE') + if (er.code === 'EJSONPARSE') { throw er + } return this[_rootNodeFromPackage]({}) } )) @@ -312,8 +319,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { // even though we didn't load it from a package-lock.json FILE, // we still loaded it "from disk", meaning we have to reset // dep flags before assuming that any mutations were reflected. - if (tree.children.size) + if (tree.children.size) { root.meta.loadedFromDisk = true + } } return root }) @@ -382,9 +390,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { process.emit('time', 'idealTree:userRequests') const tree = this.idealTree.target - if (!this[_workspaces].length) + if (!this[_workspaces].length) { await this[_applyUserRequestsToNode](tree, options) - else { + } else { await Promise.all(this.workspaceNodes(tree, this[_workspaces]) .map(node => this[_applyUserRequestsToNode](node, options))) } @@ -396,8 +404,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { // If we have a list of package names to update, and we know it's // going to update them wherever they are, add any paths into those // named nodes to the buildIdealTree queue. - if (!this[_global] && this[_updateNames].length) + if (!this[_global] && this[_updateNames].length) { this[_queueNamedUpdates]() + } // global updates only update the globalTop nodes, but we need to know // that they're there, and not reinstall the world unnecessarily. 
@@ -408,46 +417,55 @@ module.exports = cls => class IdealTreeBuilder extends cls { tree.package.dependencies = tree.package.dependencies || {} const updateName = this[_updateNames].includes(name) if (this[_updateAll] || updateName) { - if (updateName) + if (updateName) { globalExplicitUpdateNames.push(name) + } const dir = resolve(nm, name) - const st = await lstat(dir).catch(/* istanbul ignore next */ er => null) + const st = await lstat(dir) + .catch(/* istanbul ignore next */ er => null) if (st && st.isSymbolicLink()) { const target = await readlink(dir) const real = resolve(dirname(dir), target) tree.package.dependencies[name] = `file:${real}` - } else + } else { tree.package.dependencies[name] = '*' + } } } } - if (this.auditReport && this.auditReport.size > 0) + if (this.auditReport && this.auditReport.size > 0) { await this[_queueVulnDependents](options) + } const { add, rm } = options if (rm && rm.length) { addRmPkgDeps.rm(tree.package, rm) - for (const name of rm) + for (const name of rm) { this[_explicitRequests].add({ from: tree, name, action: 'DELETE' }) + } } - if (add && add.length) + if (add && add.length) { await this[_add](tree, options) + } // triggers a refresh of all edgesOut. this has to be done BEFORE // adding the edges to explicitRequests, because the package setter // resets all edgesOut. - if (add && add.length || rm && rm.length || this[_global]) + if (add && add.length || rm && rm.length || this[_global]) { tree.package = tree.package + } for (const spec of this[_resolvedAdd]) { - if (spec.tree === tree) + if (spec.tree === tree) { this[_explicitRequests].add(tree.edgesOut.get(spec.name)) + } } - for (const name of globalExplicitUpdateNames) + for (const name of globalExplicitUpdateNames) { this[_explicitRequests].add(tree.edgesOut.get(name)) + } this[_depsQueue].push(tree) } @@ -487,21 +505,24 @@ module.exports = cls => class IdealTreeBuilder extends cls { // if it's an explicit tag, we need to install that specific tag version const isTag = spec.rawSpec && spec.type === 'tag' - if (spec.name && !isTag) + if (spec.name && !isTag) { return spec + } const mani = await pacote.manifest(spec, { ...this.options }) // if it's a tag type, then we need to run it down to an actual version - if (isTag) + if (isTag) { return npa(`${mani.name}@${mani.version}`) + } spec.name = mani.name return spec } async [_updateFilePath] (spec) { - if (spec.type === 'file') + if (spec.type === 'file') { return this[_getRelpathSpec](spec, spec.fetchSpec) + } return spec } @@ -601,8 +622,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { nodesTouched.add(node) } } - for (const node of nodesTouched) + for (const node of nodesTouched) { node.package = node.package + } } } @@ -611,11 +633,13 @@ module.exports = cls => class IdealTreeBuilder extends cls { } [_avoidRange] (name) { - if (!this.auditReport) + if (!this.auditReport) { return null + } const vuln = this.auditReport.get(name) - if (!vuln) + if (!vuln) { return null + } return vuln.range } @@ -652,8 +676,9 @@ module.exports = cls => class IdealTreeBuilder extends cls { const ancient = meta.ancientLockfile const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2) - if (inventory.size === 0 || !ancient && !old) + if (inventory.size === 0 || !ancient && !old) { return + } // if the lockfile is from node v5 or earlier, then we'll have to reload // all the manifests of everything we encounter. this is costly, but at @@ -672,8 +697,9 @@ This is a one-time fix-up, please be patient... 
this.addTracker('idealTree:inflate') const queue = [] for (const node of inventory.values()) { - if (node.isProjectRoot) + if (node.isProjectRoot) { continue + } queue.push(async () => { this.log.silly('inflate', node.location) @@ -738,8 +764,9 @@ This is a one-time fix-up, please be patient... this[_currentDep] = null } - if (!this[_depsQueue].length) + if (!this[_depsQueue].length) { return this[_resolveLinks]() + } // sort physically shallower deps up to the front of the queue, // because they'll affect things deeper in, then alphabetical @@ -757,8 +784,9 @@ This is a one-time fix-up, please be patient... // satisfied by whatever's in that file anyway. if (this[_depsSeen].has(node) || node.root !== this.idealTree || - hasShrinkwrap && !this[_complete]) + hasShrinkwrap && !this[_complete]) { return this[_buildDepStep]() + } this[_depsSeen].add(node) this[_currentDep] = node @@ -841,8 +869,9 @@ This is a one-time fix-up, please be patient... const tasks = [] const peerSource = this[_peerSetSource].get(node) || node for (const edge of this[_problemEdges](node)) { - if (edge.overridden) + if (edge.overridden) { continue + } // peerSetSource is only relevant when we have a peerEntryEdge // otherwise we're setting regular non-peer deps as if they have @@ -878,8 +907,9 @@ This is a one-time fix-up, please be patient... /* istanbul ignore next */ debug(() => { - if (!dep) + if (!dep) { throw new Error('no dep??') + } }) tasks.push({edge, dep}) @@ -912,17 +942,20 @@ This is a one-time fix-up, please be patient... visit: pd => { const { placed, edge, canPlace: cpd } = pd // if we didn't place anything, nothing to do here - if (!placed) + if (!placed) { return + } // we placed something, that means we changed the tree - if (placed.errors.length) + if (placed.errors.length) { this[_loadFailures].add(placed) + } this[_mutateTree] = true if (cpd.canPlaceSelf === OK) { for (const edgeIn of placed.edgesIn) { - if (edgeIn === edge) + if (edgeIn === edge) { continue + } const { from, valid, overridden } = edgeIn if (!overridden && !valid && !this[_depsSeen].has(from)) { this.addTracker('idealTree', from.name, from.location) @@ -936,8 +969,9 @@ This is a one-time fix-up, please be patient... // intentionally causing something to get nested which was // previously placed in this location. for (const edgeIn of placed.edgesIn) { - if (edgeIn === edge) + if (edgeIn === edge) { continue + } const { valid, overridden } = edgeIn if (!valid && !overridden) { @@ -975,8 +1009,9 @@ This is a one-time fix-up, please be patient... } for (const { to } of node.edgesOut.values()) { - if (to && to.isLink && to.target) + if (to && to.isLink && to.target) { this[_linkNodes].add(to) + } } await Promise.all(promises) @@ -1019,8 +1054,9 @@ This is a one-time fix-up, please be patient... if (required.has(edge.from) && edge.type !== 'peerOptional' || secondEdge && ( - required.has(secondEdge.from) && secondEdge.type !== 'peerOptional')) + required.has(secondEdge.from) && secondEdge.type !== 'peerOptional')) { required.add(node) + } // keep track of the thing that caused this node to be included. const src = parent.sourceReference @@ -1030,16 +1066,18 @@ This is a one-time fix-up, please be patient... // otherwise we'll be tempted to put peers as other top-level installed // things, potentially clobbering what's there already, which is not // what we want. the missing edges will be picked up on the next pass. 
- if (this[_global] && edge.from.isProjectRoot) + if (this[_global] && edge.from.isProjectRoot) { return node + } // otherwise, we have to make sure that our peers can go along with us. return this[_loadPeerSet](node, required) } [_virtualRoot] (node, reuse = false) { - if (reuse && this[_virtualRoots].has(node)) + if (reuse && this[_virtualRoots].has(node)) { return this[_virtualRoots].get(node) + } const vr = new Node({ path: node.realpath, @@ -1081,16 +1119,19 @@ This is a one-time fix-up, please be patient... return [...node.edgesOut.values()] .filter(edge => { // If it's included in a bundle, we take whatever is specified. - if (bundled.has(edge.name)) + if (bundled.has(edge.name)) { return false + } // If it's already been logged as a load failure, skip it. - if (edge.to && this[_loadFailures].has(edge.to)) + if (edge.to && this[_loadFailures].has(edge.to)) { return false + } // If it's shrinkwrapped, we use what the shrinkwap wants. - if (edge.to && edge.to.inShrinkwrap) + if (edge.to && edge.to.inShrinkwrap) { return false + } // If the edge has no destination, that's a problem, unless // if it's peerOptional and not explicitly requested. @@ -1100,20 +1141,24 @@ This is a one-time fix-up, please be patient... } // If the edge has an error, there's a problem. - if (!edge.valid) + if (!edge.valid) { return true + } - // If user has explicitly asked to update this package by name, it's a problem. - if (this[_updateNames].includes(edge.name)) + // user explicitly asked to update this package by name, problem + if (this[_updateNames].includes(edge.name)) { return true + } - // If we're fixing a security vulnerability with this package, it's a problem. - if (this[_isVulnerable](edge.to)) + // fixing a security vulnerability with this package, problem + if (this[_isVulnerable](edge.to)) { return true + } - // If the user has explicitly asked to install this package, it's a "problem". - if (this[_explicitRequests].has(edge)) + // user has explicitly asked to install this package, problem + if (this[_explicitRequests].has(edge)) { return true + } // No problems! return false @@ -1129,9 +1174,9 @@ This is a one-time fix-up, please be patient... // if available and valid. spec = this.idealTree.meta.checkYarnLock(spec, options) - if (this[_manifests].has(spec.raw)) + if (this[_manifests].has(spec.raw)) { return this[_manifests].get(spec.raw) - else { + } else { this.log.silly('fetch manifest', spec.raw) const p = pacote.manifest(spec, options) .then(mani => { @@ -1201,8 +1246,9 @@ This is a one-time fix-up, please be patient... for (const edge of peerEdges) { // already placed this one, and we're happy with it. - if (edge.valid && edge.to) + if (edge.valid && edge.to) { continue + } const parentEdge = node.parent.edgesOut.get(edge.name) const {isProjectRoot, isWorkspace} = node.parent.sourceReference @@ -1223,11 +1269,17 @@ This is a one-time fix-up, please be patient... // a conflict. this is always a problem in strict mode, never // in force mode, and a problem in non-strict mode if this isn't // on behalf of our project. in all such cases, we warn at least. - const dep = await this[_nodeFromEdge](parentEdge, node.parent, edge, required) + const dep = await this[_nodeFromEdge]( + parentEdge, + node.parent, + edge, + required + ) // hooray! that worked! - if (edge.valid) + if (edge.valid) { continue + } // allow it. 
either we're overriding, or it's not something // that will be installed by default anyway, and we'll fail when @@ -1260,8 +1312,9 @@ This is a one-time fix-up, please be patient... // isn't also required, then there's a good chance we won't need it, // so allow it for now and let it conflict if it turns out to actually // be necessary for the installation. - if (conflictOK || !required.has(edge.from)) + if (conflictOK || !required.has(edge.from)) { continue + } // ok, it's the root, or we're in unforced strict mode, so this is bad this[_failPeerConflict](edge, parentEdge) @@ -1304,15 +1357,17 @@ This is a one-time fix-up, please be patient... this[_linkNodes].delete(link) // link we never ended up placing, skip it - if (link.root !== this.idealTree) + if (link.root !== this.idealTree) { continue + } const tree = this.idealTree.target const external = !link.target.isDescendantOf(tree) // outside the root, somebody else's problem, ignore it - if (external && !this[_follow]) + if (external && !this[_follow]) { continue + } // didn't find a parent for it or it has not been seen yet // so go ahead and process it. @@ -1328,8 +1383,9 @@ This is a one-time fix-up, please be patient... } } - if (this[_depsQueue].length) + if (this[_depsQueue].length) { return this[_buildDepStep]() + } } [_fixDepFlags] () { @@ -1344,8 +1400,9 @@ This is a one-time fix-up, please be patient... // all set to true, and there can be nothing extraneous, so there's // nothing to prune, because we built it from scratch. if we didn't // add or remove anything, then also nothing to do. - if (metaFromDisk && mutateTree) + if (metaFromDisk && mutateTree) { resetDepFlags(this.idealTree) + } // update all the dev/optional/etc flags in the tree // either we started with a fresh tree, or we @@ -1353,9 +1410,9 @@ This is a one-time fix-up, please be patient... // // if we started from a blank slate, or changed something, then // the dep flags will be all set to true. - if (!metaFromDisk || mutateTree) + if (!metaFromDisk || mutateTree) { calcDepFlags(this.idealTree) - else { + } else { // otherwise just unset all the flags on the root node // since they will sometimes have the default value this.idealTree.extraneous = false @@ -1370,25 +1427,29 @@ This is a one-time fix-up, please be patient... // then the tree is suspect. Prune what is marked as extraneous. // otherwise, don't bother. 
const needPrune = metaFromDisk && (mutateTree || flagsSuspect) - if (this[_prune] && needPrune) + if (this[_prune] && needPrune) { this[_idealTreePrune]() + } process.emit('timeEnd', 'idealTree:fixDepFlags') } [_idealTreePrune] () { - for (const node of this.idealTree.inventory.filter(n => n.extraneous)) + for (const node of this.idealTree.inventory.filter(n => n.extraneous)) { node.parent = null + } } [_pruneFailedOptional] () { for (const node of this[_loadFailures]) { - if (!node.optional) + if (!node.optional) { throw node.errors[0] + } const set = optionalSet(node) - for (const node of set) + for (const node of set) { node.parent = null + } } } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/deduper.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/deduper.js index c78e42e7575d0e..1741c31a19a27b 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/deduper.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/deduper.js @@ -6,8 +6,9 @@ module.exports = cls => class Deduper extends cls { const tree = await this.loadVirtual().catch(() => this.loadActual()) const names = [] for (const name of tree.inventory.query('name')) { - if (tree.inventory.query('name', name).size > 1) + if (tree.inventory.query('name', name).size > 1) { names.push(name) + } } return this.reify({ ...options, diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js index b26a26c2be2abe..d8ca67faa6065d 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/index.js @@ -59,8 +59,9 @@ class Arborist extends Base { packumentCache: options.packumentCache || new Map(), log: options.log || procLog, } - if (options.saveType && !saveTypeMap.get(options.saveType)) + if (options.saveType && !saveTypeMap.get(options.saveType)) { throw new Error(`Invalid saveType ${options.saveType}`) + } this.cache = resolve(this.options.cache) this.path = resolve(this.options.path) process.emit('timeEnd', 'arborist:ctor') @@ -81,17 +82,20 @@ class Arborist extends Base { const dep = edge.to if (dep) { set.add(dep) - if (dep.isLink) + if (dep.isLink) { set.add(dep.target) + } } } for (const child of node.children.values()) { - if (child.extraneous) + if (child.extraneous) { extraneous.add(child) + } } } - for (const extra of extraneous) + for (const extra of extraneous) { set.add(extra) + } return set } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js index 0338b2cd847b2c..68e58af7d98582 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -78,8 +78,9 @@ module.exports = cls => class ActualLoader extends cls { [_resetDepFlags] (tree, root) { // reset all deps to extraneous prior to recalc if (!root) { - for (const node of tree.inventory.values()) + for (const node of tree.inventory.values()) { node.extraneous = true + } } // only reset root flags if we're not re-rooting, @@ -176,8 +177,9 @@ module.exports = cls => class ActualLoader extends cls { await this[_loadFSTree](this[_actualTree]) await this[_loadWorkspaces](this[_actualTree]) await this[_loadWorkspaceTargets](this[_actualTree]) - if (!ignoreMissing) + if (!ignoreMissing) { await this[_findMissingEdges]() + } this[_findFSParents]() this[_transplant](root) @@ -200,8 +202,9 @@ module.exports 
= cls => class ActualLoader extends cls { // if there are workspace targets without Link nodes created, load // the targets, so that we know what they are. async [_loadWorkspaceTargets] (tree) { - if (!tree.workspaces || !tree.workspaces.size) + if (!tree.workspaces || !tree.workspaces.size) { return + } const promises = [] for (const path of tree.workspaces.values()) { @@ -215,18 +218,21 @@ module.exports = cls => class ActualLoader extends cls { } [_transplant] (root) { - if (!root || root === this[_actualTree]) + if (!root || root === this[_actualTree]) { return + } this[_actualTree][_changePath](root.path) for (const node of this[_actualTree].children.values()) { - if (!this[_transplantFilter](node)) + if (!this[_transplantFilter](node)) { node.root = null + } } root.replace(this[_actualTree]) - for (const node of this[_actualTree].fsChildren) + for (const node of this[_actualTree].fsChildren) { node.root = this[_transplantFilter](node) ? root : null + } this[_actualTree] = root } @@ -291,8 +297,9 @@ module.exports = cls => class ActualLoader extends cls { // it'll get parented later, making the fsParent scan a no-op, but better // safe than sorry, since it's cheap. const { parent, realpath } = options - if (!parent) + if (!parent) { this[_topNodes].add(realpath) + } return process.env._TEST_ARBORIST_SLOW_LINK_TARGET_ === '1' ? new Promise(res => setTimeout(() => res(new Node(options)), 100)) : new Node(options) @@ -309,8 +316,9 @@ module.exports = cls => class ActualLoader extends cls { // if a link target points at a node outside of the root tree's // node_modules hierarchy, then load that node as well. return this[_loadFSTree](link.target).then(() => link) - } else if (target.then) + } else if (target.then) { target.then(node => link.target = node) + } return link } @@ -321,13 +329,15 @@ module.exports = cls => class ActualLoader extends cls { // if a Link target has started, but not completed, then // a Promise will be in the cache to indicate this. - if (node.then) + if (node.then) { return node.then(node => this[_loadFSTree](node)) + } // impossible except in pathological ELOOP cases /* istanbul ignore if */ - if (did.has(node.realpath)) + if (did.has(node.realpath)) { return Promise.resolve(node) + } did.add(node.realpath) return this[_loadFSChildren](node) @@ -371,8 +381,11 @@ module.exports = cls => class ActualLoader extends cls { const depPromises = [] for (const [name, edge] of node.edgesOut.entries()) { - if (!edge.missing && !(edge.to && (edge.to.dummy || edge.to.parent !== node))) + const notMissing = !edge.missing && + !(edge.to && (edge.to.dummy || edge.to.parent !== node)) + if (notMissing) { continue + } // start the walk from the dirname, because we would have found // the dep in the loadFSTree step already if it was local. @@ -383,14 +396,16 @@ module.exports = cls => class ActualLoader extends cls { // allows for finding the transitive deps of link targets. // ie, if it has to go up and back out to get to the path // from the nearest common ancestor, we've gone too far. - if (ancestor && /^\.\.(?:[\\/]|$)/.test(relative(ancestor, p))) + if (ancestor && /^\.\.(?:[\\/]|$)/.test(relative(ancestor, p))) { break + } const entries = nmContents.get(p) || await readdir(p + '/node_modules').catch(() => []) nmContents.set(p, entries) - if (!entries.includes(name)) + if (!entries.includes(name)) { continue + } const d = this[_cache].has(p) ? 
await this[_cache].get(p) : new Node({ path: p, root: node.root, dummy: true }) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js index d1edcaca01d7e1..fa0aa0746e11a0 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js @@ -40,8 +40,9 @@ module.exports = cls => class VirtualLoader extends cls { // public method async loadVirtual (options = {}) { - if (this.virtualTree) + if (this.virtualTree) { return this.virtualTree + } // allow the user to set reify options on the ctor as well. // XXX: deprecate separate reify() options object. @@ -85,18 +86,21 @@ module.exports = cls => class VirtualLoader extends cls { root.optional = false root.devOptional = false root.peer = false - } else + } else { this[flagsSuspect] = true + } this[checkRootEdges](s, root) root.meta = s this.virtualTree = root const {links, nodes} = this[resolveNodes](s, root) await this[resolveLinks](links, nodes) - if (!(s.originalLockfileVersion >= 2)) + if (!(s.originalLockfileVersion >= 2)) { this[assignBundles](nodes) - if (this[flagsSuspect]) + } + if (this[flagsSuspect]) { this[reCalcDepFlags](nodes.values()) + } return root } @@ -104,8 +108,9 @@ module.exports = cls => class VirtualLoader extends cls { // reset all dep flags // can't use inventory here, because virtualTree might not be root for (const node of nodes) { - if (node.isRoot || node === this[rootOptionProvided]) + if (node.isRoot || node === this[rootOptionProvided]) { continue + } node.extraneous = true node.dev = true node.optional = true @@ -123,8 +128,9 @@ module.exports = cls => class VirtualLoader extends cls { // loaded virtually from tree, no chance of being out of sync // ancient lockfiles are critically damaged by this process, // so we need to just hope for the best in those cases. 
- if (!s.loadedFromDisk || s.ancientLockfile) + if (!s.loadedFromDisk || s.ancientLockfile) { return + } const lock = s.get('') const prod = lock.dependencies || {} @@ -140,16 +146,18 @@ module.exports = cls => class VirtualLoader extends cls { } } } - for (const name of Object.keys(optional)) + for (const name of Object.keys(optional)) { delete prod[name] + } const lockWS = [] const workspaces = this[loadWorkspacesVirtual]({ cwd: this.path, lockfile: s.data, }) - for (const [name, path] of workspaces.entries()) + for (const [name, path] of workspaces.entries()) { lockWS.push(['workspace', name, `file:${path}`]) + } const lockEdges = [ ...depsToEdges('prod', prod), @@ -174,8 +182,9 @@ module.exports = cls => class VirtualLoader extends cls { for (let i = 0; i < lockEdges.length; i++) { if (rootEdges[i][0] !== lockEdges[i][0] || rootEdges[i][1] !== lockEdges[i][1] || - rootEdges[i][2] !== lockEdges[i][2]) + rootEdges[i][2] !== lockEdges[i][2]) { return this[flagsSuspect] = true + } } } @@ -185,13 +194,15 @@ module.exports = cls => class VirtualLoader extends cls { const nodes = new Map([['', root]]) for (const [location, meta] of Object.entries(s.data.packages)) { // skip the root because we already got it - if (!location) + if (!location) { continue + } - if (meta.link) + if (meta.link) { links.set(location, meta) - else + } else { nodes.set(location, this[loadNode](location, meta)) + } } return {links, nodes} } @@ -212,8 +223,9 @@ module.exports = cls => class VirtualLoader extends cls { if (!link.target.parent) { const pj = link.realpath + '/package.json' const pkg = await rpj(pj).catch(() => null) - if (pkg) + if (pkg) { link.target.package = pkg + } } } } @@ -221,12 +233,14 @@ module.exports = cls => class VirtualLoader extends cls { [assignBundles] (nodes) { for (const [location, node] of nodes) { // Skip assignment of parentage for the root package - if (!location || node.isLink && !node.target.location) + if (!location || node.isLink && !node.target.location) { continue + } const { name, parent, package: { inBundle }} = node - if (!parent) + if (!parent) { continue + } // read inBundle from package because 'package' here is // actually a v2 lockfile metadata entry. @@ -236,10 +250,11 @@ module.exports = cls => class VirtualLoader extends cls { const { package: ppkg } = parent const { inBundle: parentBundled } = ppkg if (inBundle && !parentBundled && parent.edgesOut.has(node.name)) { - if (!ppkg.bundleDependencies) + if (!ppkg.bundleDependencies) { ppkg.bundleDependencies = [name] - else + } else { ppkg.bundleDependencies.push(name) + } } } } @@ -248,8 +263,9 @@ module.exports = cls => class VirtualLoader extends cls { const p = this.virtualTree ? 
this.virtualTree.realpath : this.path const path = resolve(p, location) // shrinkwrap doesn't include package name unless necessary - if (!sw.name) + if (!sw.name) { sw.name = nameFromFolder(path) + } const dev = sw.dev const optional = sw.optional diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js index 93d078415f5851..0a7965ae30ca1e 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/load-workspaces.js @@ -7,15 +7,17 @@ const _loadWorkspacesVirtual = Symbol.for('loadWorkspacesVirtual') module.exports = cls => class MapWorkspaces extends cls { [_appendWorkspaces] (node, workspaces) { - if (node && workspaces.size) + if (node && workspaces.size) { node.workspaces = workspaces + } return node } async [_loadWorkspaces] (node) { - if (node.workspaces) + if (node.workspaces) { return node + } const workspaces = await mapWorkspaces({ cwd: node.path, diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/rebuild.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/rebuild.js index 8e447bb8f5ad16..743794f4bda51d 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/rebuild.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/rebuild.js @@ -61,8 +61,9 @@ module.exports = cls => class Builder extends cls { async rebuild ({ nodes, handleOptionalFailure = false } = {}) { // nothing to do if we're not building anything! - if (this[_ignoreScripts] && !this[_binLinks]) + if (this[_ignoreScripts] && !this[_binLinks]) { return + } // when building for the first time, as part of reify, we ignore // failures in optional nodes, and just delete them. 
however, when @@ -76,8 +77,9 @@ module.exports = cls => class Builder extends cls { if (this[_workspaces] && this[_workspaces].length) { const filterSet = this.workspaceDependencySet(tree, this[_workspaces]) nodes = tree.inventory.filter(node => filterSet.has(node)) - } else + } else { nodes = tree.inventory.values() + } } // separates links nodes so that it can run @@ -88,10 +90,11 @@ module.exports = cls => class Builder extends cls { for (const node of nodes) { // we skip the target nodes to that workspace in order to make sure // we only run lifecycle scripts / place bin links once per workspace - if (node.isLink) + if (node.isLink) { linkNodes.add(node) - else + } else { depNodes.add(node) + } } await this[_build](depNodes, {}) @@ -118,17 +121,20 @@ module.exports = cls => class Builder extends cls { process.emit('time', `build:${type}`) await this[_buildQueues](nodes) - if (!this[_ignoreScripts]) + if (!this[_ignoreScripts]) { await this[_runScripts]('preinstall') - if (this[_binLinks] && type !== 'links') + } + if (this[_binLinks] && type !== 'links') { await this[_linkAllBins]() + } // links should also run prepare scripts and only link bins after that if (type === 'links') { await this[_runScripts]('prepare') - if (this[_binLinks]) + if (this[_binLinks]) { await this[_linkAllBins]() + } } if (!this[_ignoreScripts]) { @@ -173,8 +179,9 @@ module.exports = cls => class Builder extends cls { const { preinstall, install, postinstall, prepare } = scripts const tests = { bin, preinstall, install, postinstall, prepare } for (const [key, has] of Object.entries(tests)) { - if (has) + if (has) { this[_queues][key].push(node) + } } } process.emit('timeEnd', 'build:queue') @@ -186,15 +193,17 @@ module.exports = cls => class Builder extends cls { // the node path. Otherwise a package can have a preinstall script // that unlinks something, to allow them to silently overwrite system // binaries, which is unsafe and insecure. - if (!node.globalTop || this[_force]) + if (!node.globalTop || this[_force]) { return + } const { path, package: pkg } = node await binLinks.checkBins({ pkg, path, top: true, global: true }) } async [_addToBuildSet] (node, set, refreshed = false) { - if (set.has(node)) + if (set.has(node)) { return + } if (this[_oldMeta] === null) { const {root: {meta}} = node @@ -233,8 +242,9 @@ module.exports = cls => class Builder extends cls { await isNodeGypPackage(node.path) if (bin || preinstall || install || postinstall || prepare || isGyp) { - if (bin) + if (bin) { await this[_checkBins](node) + } if (isGyp) { scripts.install = defaultGypInstallScript node.package.scripts = scripts @@ -246,8 +256,9 @@ module.exports = cls => class Builder extends cls { async [_runScripts] (event) { const queue = this[_queues][event] - if (!queue.length) + if (!queue.length) { return + } process.emit('time', `build:run:${event}`) const stdio = this.options.foregroundScripts ? 
'inherit' : 'pipe' @@ -266,8 +277,9 @@ module.exports = cls => class Builder extends cls { } = node.target // skip any that we know we'll be deleting - if (this[_trashList].has(path)) + if (this[_trashList].has(path)) { return + } const timer = `build:run:${event}:${location}` process.emit('time', timer) @@ -321,23 +333,26 @@ module.exports = cls => class Builder extends cls { async [_linkAllBins] () { const queue = this[_queues].bin - if (!queue.length) + if (!queue.length) { return + } process.emit('time', 'build:link') const promises = [] // sort the queue by node path, so that the module-local collision // detector in bin-links will always resolve the same way. - for (const node of queue.sort(sortNodes)) + for (const node of queue.sort(sortNodes)) { promises.push(this[_createBinLinks](node)) + } await promiseAllRejectLate(promises) process.emit('timeEnd', 'build:link') } async [_createBinLinks] (node) { - if (this[_trashList].has(node.path)) + if (this[_trashList].has(node.path)) { return + } process.emit('time', `build:link:${node.location}`) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js index 965435f84fb417..3a9c4797488648 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -158,8 +158,9 @@ module.exports = cls => class Reifier extends cls { async [_validatePath] () { // don't create missing dirs on dry runs - if (this[_packageLockOnly] || this[_dryRun]) + if (this[_packageLockOnly] || this[_dryRun]) { return + } // we do NOT want to set ownership on this folder, especially // recursively, because it can have other side effects to do that @@ -172,8 +173,9 @@ module.exports = cls => class Reifier extends cls { async [_reifyPackages] () { // we don't submit the audit report or write to disk on dry runs - if (this[_dryRun]) + if (this[_dryRun]) { return + } if (this[_packageLockOnly]) { // we already have the complete tree, so just audit it now, @@ -220,8 +222,9 @@ module.exports = cls => class Reifier extends cls { for (const action of actions) { try { await this[action]() - if (reifyTerminated) + if (reifyTerminated) { throw reifyTerminated + } } catch (er) { await this[rollback](er) /* istanbul ignore next - rollback throws, should never hit this */ @@ -233,8 +236,9 @@ module.exports = cls => class Reifier extends cls { // no rollback for this one, just exit with the error, since the // install completed and can't be safely recovered at this point. 
await this[_removeTrash]() - if (reifyTerminated) + if (reifyTerminated) { throw reifyTerminated + } // done modifying the file system, no need to keep listening for sigs removeHandler() @@ -261,18 +265,21 @@ module.exports = cls => class Reifier extends cls { filter: (node, kid) => { // if it's not the project root, and we have no explicit requests, // then we're already into a nested dep, so we keep it - if (this.explicitRequests.size === 0 || !node.isProjectRoot) + if (this.explicitRequests.size === 0 || !node.isProjectRoot) { return true + } // if we added it as an edgeOut, then we want it - if (this.idealTree.edgesOut.has(kid)) + if (this.idealTree.edgesOut.has(kid)) { return true + } // if it's an explicit request, then we want it const hasExplicit = [...this.explicitRequests] .some(edge => edge.name === kid) - if (hasExplicit) + if (hasExplicit) { return true + } // ignore the rest of the global install folder return false @@ -280,8 +287,10 @@ module.exports = cls => class Reifier extends cls { } : { ignoreMissing: true } if (!this[_global]) { - return Promise.all([this.loadActual(actualOpt), this.buildIdealTree(bitOpt)]) - .then(() => process.emit('timeEnd', 'reify:loadTrees')) + return Promise.all([ + this.loadActual(actualOpt), + this.buildIdealTree(bitOpt), + ]).then(() => process.emit('timeEnd', 'reify:loadTrees')) } // the global install space tends to have a lot of stuff in it. don't @@ -295,8 +304,9 @@ module.exports = cls => class Reifier extends cls { } [_diffTrees] () { - if (this[_packageLockOnly]) + if (this[_packageLockOnly]) { return + } process.emit('time', 'reify:diffTrees') // XXX if we have an existing diff already, there should be a way @@ -311,20 +321,24 @@ module.exports = cls => class Reifier extends cls { // children where there's an explicit request. for (const { name } of this.explicitRequests) { const ideal = idealTree.children.get(name) - if (ideal) + if (ideal) { filterNodes.push(ideal) + } const actual = actualTree.children.get(name) - if (actual) + if (actual) { filterNodes.push(actual) + } } } else { for (const ws of this[_workspaces]) { const ideal = this.idealTree.children.get(ws) - if (ideal) + if (ideal) { filterNodes.push(ideal) + } const actual = this.actualTree.children.get(ws) - if (actual) + if (actual) { filterNodes.push(actual) + } } } @@ -360,8 +374,9 @@ module.exports = cls => class Reifier extends cls { const retired = retirePath(path) moves[path] = retired this[_trashList].add(retired) - } else + } else { this[_trashList].add(path) + } } } @@ -393,10 +408,11 @@ module.exports = cls => class Reifier extends cls { if (er.code === 'ENOENT') { return didMkdirp ? null : mkdirp(dirname(to)).then(() => this[_renamePath](from, to, true)) - } else if (er.code === 'EEXIST') + } else if (er.code === 'EEXIST') { return rimraf(to).then(() => moveFile(from, to)) - else + } else { throw er + } }) } @@ -417,8 +433,9 @@ module.exports = cls => class Reifier extends cls { // adding to the trash list will skip reifying, and delete them // if they are currently in the tree and otherwise untouched. 
[_addOmitsToTrashList] () { - if (!this[_omitDev] && !this[_omitOptional] && !this[_omitPeer]) + if (!this[_omitDev] && !this[_omitOptional] && !this[_omitPeer]) { return + } process.emit('time', 'reify:trashOmits') @@ -429,8 +446,9 @@ module.exports = cls => class Reifier extends cls { node.optional && this[_omitOptional] || node.devOptional && this[_omitOptional] && this[_omitDev]) - for (const node of this.idealTree.inventory.filter(filter)) + for (const node of this.idealTree.inventory.filter(filter)) { this[_addNodeToTrashList](node) + } process.emit('timeEnd', 'reify:trashOmits') } @@ -452,10 +470,12 @@ module.exports = cls => class Reifier extends cls { const dirsChecked = new Set() return promiseAllRejectLate(leaves.map(async node => { for (const d of walkUp(node.path)) { - if (d === node.top.path) + if (d === node.top.path) { break - if (dirsChecked.has(d)) + } + if (dirsChecked.has(d)) { continue + } dirsChecked.add(d) const st = await lstat(d).catch(er => null) // this can happen if we have a link to a package with a name @@ -487,8 +507,9 @@ module.exports = cls => class Reifier extends cls { .map(path => rimraf(path).catch(er => failures.push([path, er]))) return promiseAllRejectLate(unlinks) .then(() => { - if (failures.length) + if (failures.length) { this.log.warn('cleanup', 'Failed to remove some directories', failures) + } }) .then(() => process.emit('timeEnd', 'reify:rollback:createSparse')) .then(() => this[_rollbackRetireShallowNodes](er)) @@ -504,8 +525,9 @@ module.exports = cls => class Reifier extends cls { d.ideal.hasShrinkwrap && !seen.has(d.ideal) && !this[_trashList].has(d.ideal.path)) - if (!shrinkwraps.length) + if (!shrinkwraps.length) { return + } process.emit('time', 'reify:loadShrinkwraps') @@ -535,8 +557,9 @@ module.exports = cls => class Reifier extends cls { // to the trash list // Always return the node. 
[_reifyNode] (node) { - if (this[_trashList].has(node.path)) + if (this[_trashList].has(node.path)) { return node + } const timer = `reifyNode:${node.location}` process.emit('time', timer) @@ -569,8 +592,9 @@ module.exports = cls => class Reifier extends cls { // do not allow node_modules to be a symlink async [_validateNodeModules] (nm) { - if (this[_force] || this[_nmValidated].has(nm)) + if (this[_force] || this[_nmValidated].has(nm)) { return + } const st = await lstat(nm).catch(() => null) if (!st || st.isDirectory()) { this[_nmValidated].add(nm) @@ -642,8 +666,9 @@ module.exports = cls => class Reifier extends cls { [_warnDeprecated] (node) { const {_id, deprecated} = node.package - if (deprecated) + if (deprecated) { this.log.warn('deprecated', `${_id}: ${deprecated}`) + } } // if the node is optional, then the failure of the promise is nonfatal @@ -677,8 +702,9 @@ module.exports = cls => class Reifier extends cls { [_loadBundlesAndUpdateTrees] ( depth = 0, bundlesByDepth = this[_getBundlesByDepth]() ) { - if (depth === 0) + if (depth === 0) { process.emit('time', 'reify:loadBundles') + } const maxBundleDepth = bundlesByDepth.get('maxBundleDepth') if (depth > maxBundleDepth) { @@ -698,8 +724,9 @@ module.exports = cls => class Reifier extends cls { node.target !== node.root && !this[_trashList].has(node.path)) - if (!set.length) + if (!set.length) { return this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth) + } // extract all the nodes with bundles return promiseAllRejectLate(set.map(node => { @@ -725,12 +752,14 @@ module.exports = cls => class Reifier extends cls { // it's actually in the bundle if it gets transplanted notTransplanted.delete(node.name) return true - } else + } else { return false + } }, }) - for (const name of notTransplanted) + for (const name of notTransplanted) { this[_bundleMissing].add(node.children.get(name)) + } }))) // move onto the next level of bundled items .then(() => this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth)) @@ -743,18 +772,21 @@ module.exports = cls => class Reifier extends cls { tree: this.diff, visit: diff => { const node = diff.ideal - if (!node) + if (!node) { return - if (node.isProjectRoot) + } + if (node.isProjectRoot) { return + } const { bundleDependencies } = node.package if (bundleDependencies && bundleDependencies.length) { maxBundleDepth = Math.max(maxBundleDepth, node.depth) - if (!bundlesByDepth.has(node.depth)) + if (!bundlesByDepth.has(node.depth)) { bundlesByDepth.set(node.depth, [node]) - else + } else { bundlesByDepth.get(node.depth).push(node) + } } }, getChildren: diff => diff.children, @@ -791,13 +823,15 @@ module.exports = cls => class Reifier extends cls { // create the list of nodes shadowed by children of bundlers for (const bundles of bundlesByDepth.values()) { // skip the 'maxBundleDepth' item - if (!Array.isArray(bundles)) + if (!Array.isArray(bundles)) { continue + } for (const node of bundles) { for (const name of node.children.keys()) { const shadow = node.parent.resolve(name) - if (!shadow) + if (!shadow) { continue + } bundleShadowed.add(shadow) shadow.extraneous = true } @@ -851,8 +885,9 @@ module.exports = cls => class Reifier extends cls { } [_submitQuickAudit] () { - if (this.options.audit === false) + if (this.options.audit === false) { return this.auditReport = null + } // we submit the quick audit at this point in the process, as soon as // we have all the deps resolved, so that it can overlap with the other @@ -865,8 +900,9 @@ module.exports = cls => class Reifier extends cls { const 
tree = this.idealTree // if we're operating on a workspace, only audit the workspace deps - if (this[_workspaces] && this[_workspaces].length) + if (this[_workspaces] && this[_workspaces].length) { options.filterSet = this.workspaceDependencySet(tree, this[_workspaces]) + } this.auditReport = AuditReport.load(tree, options) .then(res => { @@ -891,8 +927,9 @@ module.exports = cls => class Reifier extends cls { tree: this.diff, visit: diff => { // no unpacking if we don't want to change this thing - if (diff.action !== 'CHANGE' && diff.action !== 'ADD') + if (diff.action !== 'CHANGE' && diff.action !== 'ADD') { return + } const node = diff.ideal const bd = this[_bundleUnpacked].has(node) @@ -902,13 +939,18 @@ module.exports = cls => class Reifier extends cls { // check whether we still need to unpack this one. // test the inDepBundle last, since that's potentially a tree walk. const doUnpack = node && // can't unpack if removed! - !node.isRoot && // root node already exists - !bd && // already unpacked to read bundle - !sw && // already unpacked to read sw - (bundleMissing || !node.inDepBundle) // already unpacked by another dep's bundle - - if (doUnpack) + // root node already exists + !node.isRoot && + // already unpacked to read bundle + !bd && + // already unpacked to read sw + !sw && + // already unpacked by another dep's bundle + (bundleMissing || !node.inDepBundle) + + if (doUnpack) { unpacks.push(this[_reifyNode](node)) + } }, getChildren: diff => diff.children, }) @@ -933,8 +975,9 @@ module.exports = cls => class Reifier extends cls { this[_retiredUnchanged] = {} return promiseAllRejectLate(this.diff.children.map(diff => { // skip if nothing was retired - if (diff.action !== 'CHANGE' && diff.action !== 'REMOVE') + if (diff.action !== 'CHANGE' && diff.action !== 'REMOVE') { return + } const { path: realFolder } = diff.actual const retireFolder = moves[realFolder] @@ -955,12 +998,14 @@ module.exports = cls => class Reifier extends cls { this[_retiredUnchanged][retireFolder] = [] return promiseAllRejectLate(diff.unchanged.map(node => { // no need to roll back links, since we'll just delete them anyway - if (node.isLink) + if (node.isLink) { return mkdirp(dirname(node.path)).then(() => this[_reifyNode](node)) + } // will have been moved/unpacked along with bundler - if (node.inDepBundle && !this[_bundleMissing].has(node)) + if (node.inDepBundle && !this[_bundleMissing].has(node)) { return + } this[_retiredUnchanged][retireFolder].push(node) @@ -1014,8 +1059,9 @@ module.exports = cls => class Reifier extends cls { dfwalk({ tree: this.diff, leave: diff => { - if (!diff.ideal.isProjectRoot) + if (!diff.ideal.isProjectRoot) { nodes.push(diff.ideal) + } }, // process adds before changes, ignore removals getChildren: diff => diff && diff.children, @@ -1030,8 +1076,9 @@ module.exports = cls => class Reifier extends cls { // skip links that only live within node_modules as they are most // likely managed by packages we installed, we only want to rebuild // unchanged links we directly manage - if (node.isLink && node.target.fsTop === tree) + if (node.isLink && node.target.fsTop === tree) { nodes.push(node) + } } return this.rebuild({ nodes, handleOptionalFailure: true }) @@ -1048,12 +1095,14 @@ module.exports = cls => class Reifier extends cls { const failures = [] const rm = path => rimraf(path).catch(er => failures.push([path, er])) - for (const path of this[_trashList]) + for (const path of this[_trashList]) { promises.push(rm(path)) + } return promiseAllRejectLate(promises).then(() => { - if 
(failures.length) + if (failures.length) { this.log.warn('cleanup', 'Failed to remove some directories', failures) + } }) .then(() => process.emit('timeEnd', 'reify:trash')) } @@ -1067,8 +1116,9 @@ module.exports = cls => class Reifier extends cls { // save it first, then prune out the optional trash, and then return it. // support save=false option - if (options.save === false || this[_global] || this[_dryRun]) + if (options.save === false || this[_global] || this[_dryRun]) { return false + } process.emit('time', 'reify:save') @@ -1089,6 +1139,14 @@ module.exports = cls => class Reifier extends cls { const spec = subSpec ? subSpec.rawSpec : rawSpec const child = edge.to + // if we tried to install an optional dep, but it was a version + // that we couldn't resolve, this MAY be missing. if we haven't + // blown up by now, it's because it was not a problem, though, so + // just move on. + if (!child) { + continue + } + let newSpec if (req.registry) { const version = child.version @@ -1105,8 +1163,9 @@ module.exports = cls => class Reifier extends cls { !isRange || spec === '*' || subset(prefixRange, spec, { loose: true }) - ) + ) { range = prefixRange + } const pname = child.packageName const alias = name !== pname @@ -1115,10 +1174,11 @@ module.exports = cls => class Reifier extends cls { // save the git+https url if it has auth, otherwise shortcut const h = req.hosted const opt = { noCommittish: false } - if (h.https && h.auth) + if (h.https && h.auth) { newSpec = `git+${h.https(opt)}` - else + } else { newSpec = h.shortcut(opt) + } } else if (req.type === 'directory' || req.type === 'file') { // save the relative path in package.json // Normally saveSpec is updated with the proper relative @@ -1128,34 +1188,41 @@ module.exports = cls => class Reifier extends cls { const p = req.fetchSpec.replace(/^file:/, '') const rel = relpath(addTree.realpath, p) newSpec = `file:${rel}` - } else + } else { newSpec = req.saveSpec + } if (options.saveType) { const depType = saveTypeMap.get(options.saveType) pkg[depType][name] = newSpec // rpj will have moved it here if it was in both // if it is empty it will be deleted later - if (options.saveType === 'prod' && pkg.optionalDependencies) + if (options.saveType === 'prod' && pkg.optionalDependencies) { delete pkg.optionalDependencies[name] + } } else { - if (hasSubKey(pkg, 'dependencies', name)) + if (hasSubKey(pkg, 'dependencies', name)) { pkg.dependencies[name] = newSpec + } if (hasSubKey(pkg, 'devDependencies', name)) { pkg.devDependencies[name] = newSpec // don't update peer or optional if we don't have to - if (hasSubKey(pkg, 'peerDependencies', name) && !intersects(newSpec, pkg.peerDependencies[name])) + if (hasSubKey(pkg, 'peerDependencies', name) && !intersects(newSpec, pkg.peerDependencies[name])) { pkg.peerDependencies[name] = newSpec + } - if (hasSubKey(pkg, 'optionalDependencies', name) && !intersects(newSpec, pkg.optionalDependencies[name])) + if (hasSubKey(pkg, 'optionalDependencies', name) && !intersects(newSpec, pkg.optionalDependencies[name])) { pkg.optionalDependencies[name] = newSpec + } } else { - if (hasSubKey(pkg, 'peerDependencies', name)) + if (hasSubKey(pkg, 'peerDependencies', name)) { pkg.peerDependencies[name] = newSpec + } - if (hasSubKey(pkg, 'optionalDependencies', name)) + if (hasSubKey(pkg, 'optionalDependencies', name)) { pkg.optionalDependencies[name] = newSpec + } } } @@ -1196,8 +1263,9 @@ module.exports = cls => class Reifier extends cls { } // grab any from explicitRequests that had deps removed - for (const { from: 
tree } of this.explicitRequests) + for (const { from: tree } of this.explicitRequests) { updatedTrees.add(tree) + } for (const tree of updatedTrees) { // refresh the edges so they have the correct specs @@ -1211,8 +1279,9 @@ module.exports = cls => class Reifier extends cls { } async [_saveLockFile] (saveOpt) { - if (!this[_usePackageLock]) + if (!this[_usePackageLock]) { return + } const { meta } = this.idealTree @@ -1224,8 +1293,9 @@ module.exports = cls => class Reifier extends cls { for (const path of this[_trashList]) { const loc = relpath(this.idealTree.realpath, path) const node = this.idealTree.inventory.get(loc) - if (node && node.root === this.idealTree) + if (node && node.root === this.idealTree) { node.parent = null + } } // if we filtered to only certain nodes, then anything ELSE needs @@ -1244,54 +1314,60 @@ module.exports = cls => class Reifier extends cls { // if it's an ideal node from the filter set, then skip it // because we already made whatever changes were necessary - if (filterSet.has(ideal)) + if (filterSet.has(ideal)) { continue + } // otherwise, if it's not in the actualTree, then it's not a thing // that we actually added. And if it IS in the actualTree, then // it's something that we left untouched, so we need to record // that. const actual = this.actualTree.inventory.get(loc) - if (!actual) + if (!actual) { ideal.root = null - else { + } else { if ([...actual.linksIn].some(link => filterSet.has(link))) { seen.add(actual.location) continue } const { realpath, isLink } = actual - if (isLink && ideal.isLink && ideal.realpath === realpath) + if (isLink && ideal.isLink && ideal.realpath === realpath) { continue - else + } else { reroot.add(actual) + } } } // now find any actual nodes that may not be present in the ideal // tree, but were left behind by virtue of not being in the filter for (const [loc, actual] of this.actualTree.inventory.entries()) { - if (seen.has(loc)) + if (seen.has(loc)) { continue + } seen.add(loc) // we know that this is something that ISN'T in the idealTree, // or else we will have addressed it in the previous loop. // If it's in the filterSet, that means we intentionally removed // it, so nothing to do here. - if (filterSet.has(actual)) + if (filterSet.has(actual)) { continue + } reroot.add(actual) } // go through the rerooted actual nodes, and move them over. - for (const actual of reroot) + for (const actual of reroot) { actual.root = this.idealTree + } // prune out any tops that lack a linkIn, they are no longer relevant. 
for (const top of this.idealTree.tops) { - if (top.linksIn.size === 0) + if (top.linksIn.size === 0) { top.root = null + } } // need to calculate dep flags, since nodes may have been marked @@ -1307,7 +1383,8 @@ module.exports = cls => class Reifier extends cls { this.actualTree = this.idealTree this.idealTree = null - if (!this[_global]) + if (!this[_global]) { await this.actualTree.meta.save() + } } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/audit-report.js b/deps/npm/node_modules/@npmcli/arborist/lib/audit-report.js index 8f7d6546d64f45..2e6c207b33eb3e 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/audit-report.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/audit-report.js @@ -63,8 +63,9 @@ class AuditReport extends Map { prod = false } } - if (prod) + if (prod) { dependencies.prod++ + } } // if it doesn't have any topVulns, then it's fixable with audit fix @@ -104,8 +105,9 @@ class AuditReport extends Map { async run () { this.report = await this[_getReport]() this.log.silly('audit report', this.report) - if (this.report) + if (this.report) { await this[_init]() + } return this } @@ -119,8 +121,9 @@ class AuditReport extends Map { const promises = [] for (const [name, advisories] of Object.entries(this.report)) { - for (const advisory of advisories) + for (const advisory of advisories) { promises.push(this.calculator.calculate(name, advisory)) + } } // now the advisories are calculated with a set of versions @@ -136,43 +139,51 @@ class AuditReport extends Map { // adding multiple advisories with the same range is fine, but no // need to search for nodes we already would have added. const k = `${name}@${range}` - if (seen.has(k)) + if (seen.has(k)) { continue + } seen.add(k) const vuln = this.get(name) || new Vuln({ name, advisory }) - if (this.has(name)) + if (this.has(name)) { vuln.addAdvisory(advisory) + } super.set(name, vuln) const p = [] for (const node of this.tree.inventory.query('packageName', name)) { - if (!shouldAudit(node, this[_omit], this.filterSet)) + if (!shouldAudit(node, this[_omit], this.filterSet)) { continue + } // if not vulnerable by this advisory, keep searching - if (!advisory.testVersion(node.version)) + if (!advisory.testVersion(node.version)) { continue + } // we will have loaded the source already if this is a metavuln - if (advisory.type === 'metavuln') + if (advisory.type === 'metavuln') { vuln.addVia(this.get(advisory.dependency)) + } // already marked this one, no need to do it again - if (vuln.nodes.has(node)) + if (vuln.nodes.has(node)) { continue + } // haven't marked this one yet. get its dependents. vuln.nodes.add(node) for (const { from: dep, spec } of node.edgesIn) { - if (dep.isTop && !vuln.topNodes.has(dep)) + if (dep.isTop && !vuln.topNodes.has(dep)) { this[_checkTopNode](dep, vuln, spec) - else { + } else { // calculate a metavuln, if necessary - p.push(this.calculator.calculate(dep.packageName, advisory).then(meta => { - if (meta.testVersion(dep.version, spec)) + const calc = this.calculator.calculate(dep.packageName, advisory) + p.push(calc.then(meta => { + if (meta.testVersion(dep.version, spec)) { advisories.add(meta) + } })) } } @@ -193,9 +204,11 @@ class AuditReport extends Map { // the nodes it references, then remove it from the advisory list. // happens when using omit with old audit endpoint. 
for (const advisory of vuln.advisories) { - const relevant = [...vuln.nodes].some(n => advisory.testVersion(n.version)) - if (!relevant) + const relevant = [...vuln.nodes] + .some(n => advisory.testVersion(n.version)) + if (!relevant) { vuln.deleteAdvisory(advisory) + } } } process.emit('timeEnd', 'auditReport:init') @@ -221,18 +234,21 @@ class AuditReport extends Map { // this will always be set to at least {name, versions:{}} const paku = vuln.packument - if (!vuln.testSpec(spec)) + if (!vuln.testSpec(spec)) { return true + } // similarly, even if we HAVE a packument, but we're looking for it // somewhere other than the registry, and we got something vulnerable, // then we're stuck with it. const specObj = npa(spec) - if (!specObj.registry) + if (!specObj.registry) { return false + } - if (specObj.subSpec) + if (specObj.subSpec) { spec = specObj.subSpec.rawSpec + } // We don't provide fixes for top nodes other than root, but we // still check to see if the node is fixable with a different version, @@ -287,8 +303,9 @@ class AuditReport extends Map { async [_getReport] () { // if we're not auditing, just return false - if (this.options.audit === false || this.tree.inventory.size === 1) + if (this.options.audit === false || this.tree.inventory.size === 1) { return null + } process.emit('time', 'auditReport:getReport') try { @@ -299,8 +316,9 @@ class AuditReport extends Map { // no sense asking if we don't have anything to audit, // we know it'll be empty - if (!Object.keys(body).length) + if (!Object.keys(body).length) { return null + } const res = await fetch('/-/npm/v1/security/advisories/bulk', { ...this.options, @@ -353,13 +371,15 @@ const prepareBulkData = (tree, omit, filterSet) => { for (const name of tree.inventory.query('packageName')) { const set = new Set() for (const node of tree.inventory.query('packageName', name)) { - if (!shouldAudit(node, omit, filterSet)) + if (!shouldAudit(node, omit, filterSet)) { continue + } set.add(node.version) } - if (set.size) + if (set.size) { payload[name] = [...set] + } } return payload } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/calc-dep-flags.js b/deps/npm/node_modules/@npmcli/arborist/lib/calc-dep-flags.js index 968fc83c5136cf..95ecc8a617b083 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/calc-dep-flags.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/calc-dep-flags.js @@ -11,7 +11,8 @@ const calcDepFlags = (tree, resetRoot = true) => { tree, visit: node => calcDepFlagsStep(node), filter: node => node, - getChildren: (node, tree) => [...tree.edgesOut.values()].map(edge => edge.to), + getChildren: (node, tree) => + [...tree.edgesOut.values()].map(edge => edge.to), }) return ret } @@ -39,8 +40,9 @@ const calcDepFlagsStep = (node) => { node.edgesOut.forEach(({peer, optional, dev, to}) => { // if the dep is missing, then its flags are already maximally unset - if (!to) + if (!to) { return + } // everything with any kind of edge into it is not extraneous to.extraneous = false @@ -59,28 +61,34 @@ const calcDepFlagsStep = (node) => { !node.optional && !optional const unsetPeer = !node.peer && !peer - if (unsetPeer) + if (unsetPeer) { unsetFlag(to, 'peer') + } - if (unsetDevOpt) + if (unsetDevOpt) { unsetFlag(to, 'devOptional') + } - if (unsetDev) + if (unsetDev) { unsetFlag(to, 'dev') + } - if (unsetOpt) + if (unsetOpt) { unsetFlag(to, 'optional') + } }) return node } const resetParents = (node, flag) => { - if (node[flag]) + if (node[flag]) { return + } - for (let p = node; p && (p === node || p[flag]); p = 
p.resolveParent) + for (let p = node; p && (p === node || p[flag]); p = p.resolveParent) { p[flag] = false + } } // typically a short walk, since it only traverses deps that @@ -92,8 +100,9 @@ const unsetFlag = (node, flag) => { tree: node, visit: node => { node.extraneous = node[flag] = false - if (node.isLink) + if (node.isLink) { node.target.extraneous = node.target[flag] = false + } }, getChildren: node => [...node.target.edgesOut.values()] .filter(edge => edge.to && edge.to[flag] && diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js b/deps/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js index 9601ad7af31631..7e2e1a0e2d29bb 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js @@ -64,14 +64,17 @@ class CanPlaceDep { } = options debug(() => { - if (!dep) + if (!dep) { throw new Error('no dep provided to CanPlaceDep') + } - if (!target) + if (!target) { throw new Error('no target provided to CanPlaceDep') + } - if (!edge) + if (!edge) { throw new Error('no edge provided to CanPlaceDep') + } this._treeSnapshot = JSON.stringify([...target.root.inventory.entries()] .map(([loc, {packageName, version, resolved}]) => { @@ -108,8 +111,9 @@ class CanPlaceDep { this.edgeOverride = !dep.satisfies(edge) this.canPlace = this.checkCanPlace() - if (!this.canPlaceSelf) + if (!this.canPlaceSelf) { this.canPlaceSelf = this.canPlace + } debug(() => { const treeSnapshot = JSON.stringify([...target.root.inventory.entries()] @@ -131,15 +135,18 @@ class CanPlaceDep { // if the dep failed to load, we're going to fail the build or // prune it out anyway, so just move forward placing/replacing it. - if (dep.errors.length) + if (dep.errors.length) { return current ? REPLACE : OK + } // cannot place peers inside their dependents, except for tops - if (targetEdge && targetEdge.peer && !target.isTop) + if (targetEdge && targetEdge.peer && !target.isTop) { return CONFLICT + } - if (targetEdge && !dep.satisfies(targetEdge) && targetEdge !== this.edge) + if (targetEdge && !dep.satisfies(targetEdge) && targetEdge !== this.edge) { return CONFLICT + } return current ? this.checkCanPlaceCurrent() : this.checkCanPlaceNoCurrent() } @@ -150,8 +157,9 @@ class CanPlaceDep { const { preferDedupe, explicitRequest, current, target, edge, dep } = this if (dep.matches(current)) { - if (current.satisfies(edge) || this.edgeOverride) + if (current.satisfies(edge) || this.edgeOverride) { return explicitRequest ? REPLACE : KEEP + } } const { version: curVer } = current @@ -163,19 +171,22 @@ class CanPlaceDep { * but it is theoretically possible if peer deps are pinned. In * that case we treat it like any other conflict, and keep trying */ const cpp = this.canPlacePeers(REPLACE) - if (cpp !== CONFLICT) + if (cpp !== CONFLICT) { return cpp + } } // ok, can't replace the current with new one, but maybe current is ok? - if (current.satisfies(edge) && (!explicitRequest || preferDedupe)) + if (current.satisfies(edge) && (!explicitRequest || preferDedupe)) { return KEEP + } // if we prefer deduping, then try replacing newer with older if (preferDedupe && !tryReplace && dep.canReplace(current)) { const cpp = this.canPlacePeers(REPLACE) - if (cpp !== CONFLICT) + if (cpp !== CONFLICT) { return cpp + } } // Check for interesting cases! @@ -185,29 +196,33 @@ class CanPlaceDep { const myDeepest = this.deepestNestingTarget // ok, i COULD be placed deeper, so leave the current one alone. 
- if (target !== myDeepest) + if (target !== myDeepest) { return CONFLICT + } // if we are not checking a peerDep, then we MUST place it here, in the // target that has a non-peer dep on it. - if (!edge.peer && target === edge.from) + if (!edge.peer && target === edge.from) { return this.canPlacePeers(REPLACE) + } // if we aren't placing a peer in a set, then we're done here. // This is ignored because it SHOULD be redundant, as far as I can tell, // with the deepest target and target===edge.from tests. But until we // can prove that isn't possible, this condition is here for safety. /* istanbul ignore if - allegedly impossible */ - if (!this.parent && !edge.peer) + if (!this.parent && !edge.peer) { return CONFLICT + } // check the deps in the peer group for each edge into that peer group // if ALL of them can be pushed deeper, or if it's ok to replace its // members with the contents of the new peer group, then we're good. let canReplace = true for (const [entryEdge, currentPeers] of peerEntrySets(current)) { - if (entryEdge === this.edge || entryEdge === this.peerEntryEdge) + if (entryEdge === this.edge || entryEdge === this.peerEntryEdge) { continue + } // First, see if it's ok to just replace the peerSet entirely. // we do this by walking out from the entryEdge, because in a case like @@ -231,8 +246,9 @@ class CanPlaceDep { const entryNode = entryEdge.to const entryRep = dep.parent.children.get(entryNode.name) if (entryRep) { - if (entryRep.canReplace(entryNode, dep.parent.children.keys())) + if (entryRep.canReplace(entryNode, dep.parent.children.keys())) { continue + } } let canClobber = !entryRep @@ -240,12 +256,14 @@ class CanPlaceDep { const peerReplacementWalk = new Set([entryNode]) OUTER: for (const currentPeer of peerReplacementWalk) { for (const edge of currentPeer.edgesOut.values()) { - if (!edge.peer || !edge.valid) + if (!edge.peer || !edge.valid) { continue + } const rep = dep.parent.children.get(edge.name) if (!rep) { - if (edge.to) + if (edge.to) { peerReplacementWalk.add(edge.to) + } continue } if (!rep.satisfies(edge)) { @@ -255,14 +273,16 @@ class CanPlaceDep { } } } - if (canClobber) + if (canClobber) { continue + } // ok, we can't replace, but maybe we can nest the current set deeper? let canNestCurrent = true for (const currentPeer of currentPeers) { - if (!canNestCurrent) + if (!canNestCurrent) { break + } // still possible to nest this peerSet const curDeep = deepestNestingTarget(entryEdge.from, currentPeer.name) @@ -270,14 +290,16 @@ class CanPlaceDep { canNestCurrent = false canReplace = false } - if (canNestCurrent) + if (canNestCurrent) { continue + } } } // if we can nest or replace all the current peer groups, we can replace. - if (canReplace) + if (canReplace) { return this.canPlacePeers(REPLACE) + } return CONFLICT } @@ -293,8 +315,9 @@ class CanPlaceDep { if (current) { for (const edge of current.edgesIn.values()) { if (edge.from.isDescendantOf(target) && edge.valid) { - if (!dep.satisfies(edge)) + if (!dep.satisfies(edge)) { return CONFLICT + } } } } @@ -316,8 +339,9 @@ class CanPlaceDep { get allChildren () { const set = new Set(this.children) for (const child of set) { - for (const grandchild of child.children) + for (const grandchild of child.children) { set.add(grandchild) + } } return [...set] } @@ -329,15 +353,17 @@ class CanPlaceDep { // check if peers can go here. 
returns state or CONFLICT canPlacePeers (state) { this.canPlaceSelf = state - if (this._canPlacePeers) + if (this._canPlacePeers) { return this._canPlacePeers + } // TODO: represent peerPath in ERESOLVE error somehow? const peerPath = [...this.peerPath, this.dep] let sawConflict = false for (const peerEdge of this.dep.edgesOut.values()) { - if (!peerEdge.peer || !peerEdge.to || peerPath.includes(peerEdge.to)) + if (!peerEdge.peer || !peerEdge.to || peerPath.includes(peerEdge.to)) { continue + } const peer = peerEdge.to // it may be the case that the *initial* dep can be nested, but a peer // of that dep needs to be placed shallower, because the target has @@ -354,13 +380,15 @@ class CanPlaceDep { }) /* istanbul ignore next */ debug(() => { - if (this.children.some(c => c.dep === cpp.dep)) + if (this.children.some(c => c.dep === cpp.dep)) { throw new Error('checking same dep repeatedly') + } }) this.children.push(cpp) - if (cpp.canPlace === CONFLICT) + if (cpp.canPlace === CONFLICT) { sawConflict = true + } } this._canPlacePeers = sawConflict ? CONFLICT : state diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/case-insensitive-map.js b/deps/npm/node_modules/@npmcli/arborist/lib/case-insensitive-map.js index 8254c3f7a55e99..016ce6017b01e4 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/case-insensitive-map.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/case-insensitive-map.js @@ -10,8 +10,9 @@ module.exports = class Map extends OGMap { constructor (items = []) { super() this[_keys] = new OGMap() - for (const [key, val] of items) + for (const [key, val] of items) { this.set(key, val) + } } [_normKey] (key) { @@ -26,8 +27,9 @@ module.exports = class Map extends OGMap { set (key, val) { const normKey = this[_normKey](key) - if (this[_keys].has(normKey)) + if (this[_keys].has(normKey)) { super.delete(this[_keys].get(normKey)) + } this[_keys].set(normKey, key) return super.set(key, val) } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js index 32276482419017..e34e40a46d0024 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js @@ -5,8 +5,9 @@ const npa = require('npm-package-arg') const relpath = require('./relpath.js') const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => { - if (!resolved) + if (!resolved) { return null + } try { const hostedOpt = { noCommittish: false } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js b/deps/npm/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js index 9c433a7652da2f..2c6647f5db7bad 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js @@ -5,11 +5,13 @@ const deepestNestingTarget = (start, name) => { for (const target of start.ancestry()) { // note: this will skip past the first target if edge is peer - if (target.isProjectRoot || !target.resolveParent || target.globalTop) + if (target.isProjectRoot || !target.resolveParent || target.globalTop) { return target + } const targetEdge = target.edgesOut.get(name) - if (!targetEdge || !targetEdge.peer) + if (!targetEdge || !targetEdge.peer) { return target + } } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js b/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js index 01e5e21e94ce5d..d80437f20c8e4d 100644 --- 
a/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/dep-valid.js @@ -44,8 +44,9 @@ const depValid = (child, requested, requestor) => { switch (requested.type) { case 'range': - if (requested.fetchSpec === '*') + if (requested.fetchSpec === '*') { return true + } // fallthrough case 'version': // if it's a version or a range other than '*', semver it @@ -108,17 +109,20 @@ const depValid = (child, requested, requestor) => { } const tarballValid = (child, requested, requestor) => { - if (child.isLink) + if (child.isLink) { return false + } - if (child.resolved) + if (child.resolved) { return child.resolved.replace(/\\/g, '/') === `file:${requested.fetchSpec.replace(/\\/g, '/')}` + } // if we have a legacy mutated package.json file. we can't be 100% // sure that it resolved to the same file, but if it was the same // request, that's a pretty good indicator of sameness. - if (child.package._requested) + if (child.package._requested) { return child.package._requested.saveSpec === requested.saveSpec + } // ok, we're probably dealing with some legacy cruft here, not much // we can do at this point unfortunately. diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/diff.js b/deps/npm/node_modules/@npmcli/arborist/lib/diff.js index 2008ef7a35bddf..0d17bde9583ac7 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/diff.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/diff.js @@ -31,7 +31,12 @@ class Diff { this.removed = [] } - static calculate ({actual, ideal, filterNodes = [], shrinkwrapInflated = new Set()}) { + static calculate ({ + actual, + ideal, + filterNodes = [], + shrinkwrapInflated = new Set(), + }) { // if there's a filterNode, then: // - get the path from the root to the filterNode. The root or // root.target should have an edge either to the filterNode or @@ -43,8 +48,9 @@ class Diff { const extraneous = new Set() for (const filterNode of filterNodes) { const { root } = filterNode - if (root !== ideal && root !== actual) + if (root !== ideal && root !== actual) { throw new Error('invalid filterNode: outside idealTree/actualTree') + } const rootTarget = root.target const edge = [...rootTarget.edgesOut.values()].filter(e => { return e.to && (e.to === filterNode || e.to.target === filterNode) @@ -73,8 +79,9 @@ class Diff { : [...actualNode.edgesOut.values()].filter(e => e.to).map(e => e.to) if (actualNode) { for (const child of actualNode.children.values()) { - if (child.extraneous) + if (child.extraneous) { extraneous.add(child) + } } } @@ -82,8 +89,9 @@ class Diff { }, }) } - for (const extra of extraneous) + for (const extra of extraneous) { filterSet.add(extra) + } return depth({ tree: new Diff({actual, ideal, filterSet, shrinkwrapInflated}), @@ -94,23 +102,27 @@ class Diff { } const getAction = ({actual, ideal}) => { - if (!ideal) + if (!ideal) { return 'REMOVE' + } // bundled meta-deps are copied over to the ideal tree when we visit it, // so they'll appear to be missing here. There's no need to handle them // in the diff, though, because they'll be replaced at reify time anyway // Otherwise, add the missing node. - if (!actual) + if (!actual) { return ideal.inDepBundle ? 
null : 'ADD' + } // always ignore the root node - if (ideal.isRoot && actual.isRoot) + if (ideal.isRoot && actual.isRoot) { return null + } // if the versions don't match, it's a change no matter what - if (ideal.version !== actual.version) + if (ideal.version !== actual.version) { return 'CHANGE' + } const binsExist = ideal.binPaths.every((path) => existsSync(path)) @@ -125,33 +137,38 @@ const getAction = ({actual, ideal}) => { const noIntegrity = !ideal.integrity && !actual.integrity const noResolved = !ideal.resolved && !actual.resolved const resolvedMatch = ideal.resolved && ideal.resolved === actual.resolved - if (noIntegrity && binsExist && (resolvedMatch || noResolved)) + if (noIntegrity && binsExist && (resolvedMatch || noResolved)) { return null + } // otherwise, verify that it's the same bits // note that if ideal has integrity, and resolved doesn't, we treat // that as a 'change', so that it gets re-fetched and locked down. const integrityMismatch = !ideal.integrity || !actual.integrity || !ssri.parse(ideal.integrity).match(actual.integrity) - if (integrityMismatch || !binsExist) + if (integrityMismatch || !binsExist) { return 'CHANGE' + } return null } const allChildren = node => { - if (!node) + if (!node) { return new Map() + } // if the node is root, and also a link, then what we really // want is to traverse the target's children - if (node.isRoot && node.isLink) + if (node.isRoot && node.isLink) { return allChildren(node.target) + } const kids = new Map() for (const n of [node, ...node.fsChildren]) { - for (const kid of n.children.values()) + for (const kid of n.children.values()) { kids.set(kid.path, kid) + } } return kids } @@ -160,7 +177,14 @@ const allChildren = node => { // to create the diff tree const getChildren = diff => { const children = [] - const {actual, ideal, unchanged, removed, filterSet, shrinkwrapInflated} = diff + const { + actual, + ideal, + unchanged, + removed, + filterSet, + shrinkwrapInflated, + } = diff // Note: we DON'T diff fsChildren themselves, because they are either // included in the package contents, or part of some other project, and @@ -182,26 +206,45 @@ const getChildren = diff => { for (const path of paths) { const actual = actualKids.get(path) const ideal = idealKids.get(path) - diffNode(actual, ideal, children, unchanged, removed, filterSet, shrinkwrapInflated) + diffNode({ + actual, + ideal, + children, + unchanged, + removed, + filterSet, + shrinkwrapInflated, + }) } - if (diff.leaves && !children.length) + if (diff.leaves && !children.length) { diff.leaves.push(diff) + } return children } -const diffNode = (actual, ideal, children, unchanged, removed, filterSet, shrinkwrapInflated) => { - if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) +const diffNode = ({ + actual, + ideal, + children, + unchanged, + removed, + filterSet, + shrinkwrapInflated, +}) => { + if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) { return + } const action = getAction({actual, ideal}) // if it's a match, then get its children // otherwise, this is the child diff node if (action || (!shrinkwrapInflated.has(ideal) && ideal.hasShrinkwrap)) { - if (action === 'REMOVE') + if (action === 'REMOVE') { removed.push(actual) + } children.push(new Diff({actual, ideal, filterSet, shrinkwrapInflated})) } else { unchanged.push(ideal) @@ -227,13 +270,22 @@ const diffNode = (actual, ideal, children, unchanged, removed, filterSet, shrink if (actual && bd && bd.length) { const bundledChildren = [] for (const node of 
actual.children.values()) { - if (node.inBundle) + if (node.inBundle) { bundledChildren.push(node) + } } - for (const node of bundledChildren) + for (const node of bundledChildren) { node.parent = ideal + } } - children.push(...getChildren({actual, ideal, unchanged, removed, filterSet, shrinkwrapInflated})) + children.push(...getChildren({ + actual, + ideal, + unchanged, + removed, + filterSet, + shrinkwrapInflated, + })) } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/edge.js b/deps/npm/node_modules/@npmcli/arborist/lib/edge.js index 9d5ece40e5fae0..777ecc44a7c00e 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/edge.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/edge.js @@ -45,22 +45,26 @@ class Edge { constructor (options) { const { type, name, spec, accept, from } = options - if (typeof spec !== 'string') + if (typeof spec !== 'string') { throw new TypeError('must provide string spec') + } - if (type === 'workspace' && npa(spec).type !== 'directory') + if (type === 'workspace' && npa(spec).type !== 'directory') { throw new TypeError('workspace edges must be a symlink') + } this[_spec] = spec if (accept !== undefined) { - if (typeof accept !== 'string') + if (typeof accept !== 'string') { throw new TypeError('accept field must be a string if provided') + } this[_accept] = accept || '*' } - if (typeof name !== 'string') + if (typeof name !== 'string') { throw new TypeError('must provide dependency name') + } this[_name] = name if (!types.has(type)) { @@ -69,20 +73,23 @@ class Edge { `(valid types are: ${Edge.types.join(', ')})`) } this[_type] = type - if (!from) + if (!from) { throw new TypeError('must provide "from" node') + } this[_setFrom](from) this[_error] = this[_loadError]() this.overridden = false } satisfiedBy (node) { - return node.name === this.name && depValid(node, this.spec, this.accept, this.from) + return node.name === this.name && + depValid(node, this.spec, this.accept, this.from) } explain (seen = []) { - if (this[_explanation]) + if (this[_explanation]) { return this[_explanation] + } return this[_explanation] = this[_explain](seen) } @@ -101,8 +108,9 @@ class Edge { } get bundled () { - if (!this.from) + if (!this.from) { return false + } const { package: { bundleDependencies = [] } } = this.from return bundleDependencies.includes(this.name) } @@ -175,20 +183,24 @@ class Edge { this[_explanation] = null const newTo = this[_from].resolve(this.name) if (newTo !== this[_to]) { - if (this[_to]) + if (this[_to]) { this[_to].edgesIn.delete(this) + } this[_to] = newTo this[_error] = this[_loadError]() - if (this[_to]) + if (this[_to]) { this[_to].addEdgeIn(this) - } else if (hard) + } + } else if (hard) { this[_error] = this[_loadError]() + } } detach () { this[_explanation] = null - if (this[_to]) + if (this[_to]) { this[_to].edgesIn.delete(this) + } this[_from].edgesOut.delete(this.name) this[_to] = null this[_error] = 'DETACHED' @@ -198,8 +210,9 @@ class Edge { [_setFrom] (node) { this[_explanation] = null this[_from] = node - if (node.edgesOut.has(this.name)) + if (node.edgesOut.has(this.name)) { node.edgesOut.get(this.name).detach() + } node.addEdgeOut(this) this.reload() } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/gather-dep-set.js b/deps/npm/node_modules/@npmcli/arborist/lib/gather-dep-set.js index 1dc9a0b188eaa4..2c85a640fddfb1 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/gather-dep-set.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/gather-dep-set.js @@ -14,8 +14,9 @@ const gatherDepSet = (set, edgeFilter) => { 
// as the deps set increases in size. for (const node of deps) { for (const edge of node.edgesOut.values()) { - if (edge.to && edgeFilter(edge)) + if (edge.to && edgeFilter(edge)) { deps.add(edge.to) + } } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/inventory.js b/deps/npm/node_modules/@npmcli/arborist/lib/inventory.js index a4ae11c2ab41e9..34b6f98a8b2866 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/inventory.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/inventory.js @@ -13,11 +13,13 @@ const debug = require('./debug.js') const getLicense = pkg => { if (pkg) { const lic = pkg.license || pkg.licence - if (lic) + if (lic) { return lic + } const lics = pkg.licenses || pkg.licences - if (Array.isArray(lics)) + if (Array.isArray(lics)) { return lics[0] + } } } @@ -42,8 +44,9 @@ class Inventory extends Map { * filter (fn) { for (const node of this.values()) { - if (fn(node)) + if (fn(node)) { yield node + } } } @@ -62,8 +65,9 @@ class Inventory extends Map { const current = super.get(node[this.primaryKey]) if (current) { - if (current === node) + if (current === node) { return + } this.delete(current) } super.set(node[this.primaryKey], node) @@ -85,8 +89,9 @@ class Inventory extends Map { } delete (node) { - if (!this.has(node)) + if (!this.has(node)) { return + } super.delete(node[this.primaryKey]) for (const [key, map] of this[_index].entries()) { @@ -95,8 +100,9 @@ class Inventory extends Map { const set = map.get(val) if (set) { set.delete(node) - if (set.size === 0) + if (set.size === 0) { map.delete(node[key]) + } } } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/link.js b/deps/npm/node_modules/@npmcli/arborist/lib/link.js index 4d15428d873602..0289e04151ef55 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/link.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/link.js @@ -11,8 +11,9 @@ class Link extends Node { constructor (options) { const { root, realpath, target, parent, fsParent } = options - if (!realpath && !(target && target.path)) + if (!realpath && !(target && target.path)) { throw new TypeError('must provide realpath for Link node') + } super({ ...options, @@ -23,11 +24,11 @@ class Link extends Node { : null), }) - if (target) + if (target) { this.target = target - else if (this.realpath === this.root.path) + } else if (this.realpath === this.root.path) { this.target = this.root - else { + } else { this.target = new Node({ ...options, path: realpath, @@ -48,8 +49,9 @@ class Link extends Node { set target (target) { const current = this[_target] - if (target === current) + if (target === current) { return + } if (current && current.then) { debug(() => { @@ -72,25 +74,28 @@ class Link extends Node { } if (!target) { - if (current && current.linksIn) + if (current && current.linksIn) { current.linksIn.delete(this) + } if (this.path) { this[_delistFromMeta]() this[_target] = null this.package = {} this[_refreshLocation]() - } else + } else { this[_target] = null + } return } if (!this.path) { // temp node pending assignment to a tree // we know it's not in the inventory yet, because no path. 
- if (target.path) + if (target.path) { this.realpath = target.path - else + } else { target.path = target.realpath = this.realpath + } target.root = this.root this[_target] = target target.linksIn.add(this) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/node.js b/deps/npm/node_modules/@npmcli/arborist/lib/node.js index 5616019dd9cc2f..a872f24805b59c 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/node.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/node.js @@ -120,8 +120,9 @@ class Node { // should be equal if not a link this.path = path ? resolve(path) : null - if (!this.name && (!this.path || this.path !== dirname(this.path))) + if (!this.name && (!this.path || this.path !== dirname(this.path))) { throw new TypeError('could not detect node name from path or package') + } this.realpath = !this.isLink ? this.path : resolve(realpath) @@ -142,8 +143,9 @@ class Node { // // Otherwise, hopefully a shrinkwrap will help us out. const resolved = consistentResolve(pkg._resolved) - if (resolved && !(/^file:/.test(resolved) && pkg._where)) + if (resolved && !(/^file:/.test(resolved) && pkg._where)) { this.resolved = resolved + } } this.integrity = integrity || pkg._integrity || null this.hasShrinkwrap = hasShrinkwrap || pkg._hasShrinkwrap || false @@ -215,18 +217,21 @@ class Node { // see parent/root setters below. // root is set to parent's root if we have a parent, otherwise if it's // null, then it's set to the node itself. - if (!parent && !fsParent) + if (!parent && !fsParent) { this.root = root || null + } // mostly a convenience for testing, but also a way to create // trees in a more declarative way than setting parent on each if (children) { - for (const c of children) + for (const c of children) { new Node({ ...c, parent: this }) + } } if (fsChildren) { - for (const c of fsChildren) + for (const c of fsChildren) { new Node({ ...c, fsParent: this }) + } } // now load all the dep edges @@ -239,8 +244,9 @@ class Node { set meta (meta) { this[_meta] = meta - if (meta) + if (meta) { meta.add(this) + } } get global () { @@ -260,8 +266,9 @@ class Node { // deletes edges if they already exists if (this[_workspaces]) { for (const name of this[_workspaces].keys()) { - if (!workspaces.has(name)) + if (!workspaces.has(name)) { this.edgesOut.get(name).detach() + } } } @@ -271,8 +278,9 @@ class Node { } get binPaths () { - if (!this.parent) + if (!this.parent) { return [] + } return getBinPaths({ pkg: this[_package], @@ -319,8 +327,9 @@ class Node { // only do this more than once at the root level, so the resolve() calls // are only one level deep, and there's not much to be saved, anyway. // simpler to just toss them all out. - for (const edge of this.edgesOut.values()) + for (const edge of this.edgesOut.values()) { edge.detach() + } this[_explanation] = null /* istanbul ignore next - should be impossible */ @@ -341,8 +350,9 @@ class Node { // node.explain(nodes seen already, edge we're trying to satisfy // if edge is not specified, it lists every edge into the node. 
explain (edge = null, seen = []) { - if (this[_explanation]) + if (this[_explanation]) { return this[_explanation] + } return this[_explanation] = this[_explain](edge, seen) } @@ -374,11 +384,13 @@ class Node { } } - if (this.sourceReference) + if (this.sourceReference) { return this.sourceReference.explain(edge, seen) + } - if (seen.includes(this)) + if (seen.includes(this)) { return why + } why.location = this.location why.isWorkspace = this.isWorkspace @@ -387,56 +399,64 @@ class Node { seen = seen.concat(this) why.dependents = [] - if (edge) + if (edge) { why.dependents.push(edge.explain(seen)) - else { + } else { // ignore invalid edges, since those aren't satisfied by this thing, // and are not keeping it held in this spot anyway. const edges = [] for (const edge of this.edgesIn) { - if (!edge.valid && !edge.from.isProjectRoot) + if (!edge.valid && !edge.from.isProjectRoot) { continue + } edges.push(edge) } - for (const edge of edges) + for (const edge of edges) { why.dependents.push(edge.explain(seen)) + } } - if (this.linksIn.size) + if (this.linksIn.size) { why.linksIn = [...this.linksIn].map(link => link[_explain](edge, seen)) + } return why } isDescendantOf (node) { for (let p = this; p; p = p.resolveParent) { - if (p === node) + if (p === node) { return true + } } return false } getBundler (path = []) { // made a cycle, definitely not bundled! - if (path.includes(this)) + if (path.includes(this)) { return null + } path.push(this) const parent = this[_parent] - if (!parent) + if (!parent) { return null + } const pBundler = parent.getBundler(path) - if (pBundler) + if (pBundler) { return pBundler + } const ppkg = parent.package const bd = ppkg && ppkg.bundleDependencies // explicit bundling - if (Array.isArray(bd) && bd.includes(this.name)) + if (Array.isArray(bd) && bd.includes(this.name)) { return parent + } // deps that are deduped up to the bundling level are bundled. // however, if they get their dep met further up than that, @@ -444,11 +464,13 @@ class Node { // unmet bundled deps will not cause your deps to be bundled. for (const edge of this.edgesIn) { const eBundler = edge.from.getBundler(path) - if (!eBundler) + if (!eBundler) { continue + } - if (eBundler === parent) + if (eBundler === parent) { return eBundler + } } return null @@ -467,8 +489,9 @@ class Node { } get isWorkspace () { - if (this.isProjectRoot) + if (this.isProjectRoot) { return false + } const { root } = this const { type, to } = root.edgesOut.get(this.packageName) || {} return type === 'workspace' && to && (to.target === this || to === this) @@ -486,15 +509,17 @@ class Node { } * ancestry () { - for (let anc = this; anc; anc = anc.resolveParent) + for (let anc = this; anc; anc = anc.resolveParent) { yield anc + } } set root (root) { // setting to null means this is the new root // should only ever be one step - while (root && root.root !== root) + while (root && root.root !== root) { root = root.root + } root = root || this @@ -504,8 +529,9 @@ class Node { // can't set the root (yet) if there's no way to determine location // this allows us to do new Node({...}) and then set the root later. // just make the assignment so we don't lose it, and move on. 
- if (!this.path || !root.realpath || !root.path) + if (!this.path || !root.realpath || !root.path) { return this[_root] = root + } // temporarily become a root node this[_root] = this @@ -521,8 +547,9 @@ class Node { if (this.isLink) { if (target) { target.linksIn.delete(this) - if (target.root === this) + if (target.root === this) { target[_delistFromMeta]() + } } this[_target] = null } @@ -539,16 +566,17 @@ class Node { this[_fsParent] = null } - if (root === this) + if (root === this) { this[_refreshLocation]() - else { + } else { // setting to some different node. const loc = relpath(root.realpath, this.path) const current = root.inventory.get(loc) // clobber whatever is there now - if (current) + if (current) { current.root = null + } this[_root] = root // set this.location and add to inventory @@ -556,8 +584,9 @@ class Node { // try to find our parent/fsParent in the new root inventory for (const p of walkUp(dirname(this.path))) { - if (p === this.path) + if (p === this.path) { continue + } const ploc = relpath(root.realpath, p) const parent = root.inventory.get(ploc) if (parent) { @@ -576,8 +605,9 @@ class Node { const isParent = this.location === childLoc if (isParent) { const oldChild = parent.children.get(this.name) - if (oldChild && oldChild !== this) + if (oldChild && oldChild !== this) { oldChild.root = null + } if (this.parent) { this.parent.children.delete(this.name) this.parent[_reloadNamedEdges](this.name) @@ -586,13 +616,15 @@ class Node { this[_parent] = parent // don't do it for links, because they don't have a target yet // we'll hit them up a bit later on. - if (!this.isLink) + if (!this.isLink) { parent[_reloadNamedEdges](this.name) + } } else { /* istanbul ignore if - should be impossible, since we break * all fsParent/child relationships when moving? */ - if (this.fsParent) + if (this.fsParent) { this.fsParent.fsChildren.delete(this) + } parent.fsChildren.add(this) this[_fsParent] = parent } @@ -601,10 +633,11 @@ class Node { } // if it doesn't have a parent, it's a top node - if (!this.parent) + if (!this.parent) { root.tops.add(this) - else + } else { root.tops.delete(this) + } // assign parentage for any nodes that need to have this as a parent // this can happen when we have a node at nm/a/nm/b added *before* @@ -614,24 +647,30 @@ class Node { const nmloc = `${this.location}${this.location ? '/' : ''}node_modules/` const isChild = n => n.location === nmloc + n.name // check dirname so that /foo isn't treated as the fsparent of /foo-bar - const isFsChild = n => dirname(n.path).startsWith(this.path) && - n !== this && - !n.parent && - (!n.fsParent || n.fsParent === this || dirname(this.path).startsWith(n.fsParent.path)) + const isFsChild = n => { + return dirname(n.path).startsWith(this.path) && + n !== this && + !n.parent && + (!n.fsParent || + n.fsParent === this || + dirname(this.path).startsWith(n.fsParent.path)) + } const isKid = n => isChild(n) || isFsChild(n) // only walk top nodes, since anything else already has a parent. 
for (const child of root.tops) { - if (!isKid(child)) + if (!isKid(child)) { continue + } // set up the internal parentage links - if (this.isLink) + if (this.isLink) { child.root = null - else { + } else { // can't possibly have a parent, because it's in tops - if (child.fsParent) + if (child.fsParent) { child.fsParent.fsChildren.delete(child) + } child[_fsParent] = null if (isChild(child)) { this.children.set(child.name, child) @@ -648,13 +687,15 @@ class Node { // to that realpath, or a thing at that realpath if we're adding a link // (if we're adding a regular node, we already deleted the old one) for (const node of root.inventory.query('realpath', this.realpath)) { - if (node === this) + if (node === this) { continue + } /* istanbul ignore next - should be impossible */ debug(() => { - if (node.root !== root) + if (node.root !== root) { throw new Error('inventory contains node from other root') + } }) if (this.isLink) { @@ -663,8 +704,9 @@ class Node { this[_package] = target.package target.linksIn.add(this) // reload edges here, because now we have a target - if (this.parent) + if (this.parent) { this.parent[_reloadNamedEdges](this.name) + } break } else { /* istanbul ignore else - should be impossible */ @@ -672,8 +714,9 @@ class Node { node[_target] = this node[_package] = this.package this.linksIn.add(node) - if (node.parent) + if (node.parent) { node.parent[_reloadNamedEdges](node.name) + } } else { debug(() => { throw Object.assign(new Error('duplicate node in root setter'), { @@ -690,14 +733,16 @@ class Node { // reload all edgesIn where the root doesn't match, so we don't have // cross-tree dependency graphs for (const edge of this.edgesIn) { - if (edge.from.root !== root) + if (edge.from.root !== root) { edge.reload() + } } // reload all edgesOut where root doens't match, or is missing, since // it might not be missing in the new tree for (const edge of this.edgesOut.values()) { - if (!edge.to || edge.to.root !== root) + if (!edge.to || edge.to.root !== root) { edge.reload() + } } // now make sure our family comes along for the ride! @@ -721,15 +766,17 @@ class Node { } } for (const child of family) { - if (child.root !== root) + if (child.root !== root) { child.root = root + } } // if we had a target, and didn't find one in the new root, then bring // it over as well, but only if we're setting the link into a new root, // as we don't want to lose the target any time we remove a link. - if (this.isLink && target && !this.target && root !== this) + if (this.isLink && target && !this.target && root !== this) { target.root = root + } // tree should always be valid upon root setter completion. 
treeCheck(this) @@ -741,11 +788,13 @@ class Node { } [_loadWorkspaces] () { - if (!this[_workspaces]) + if (!this[_workspaces]) { return + } - for (const [name, path] of this[_workspaces].entries()) + for (const [name, path] of this[_workspaces].entries()) { new Edge({ from: this, name, spec: `file:${path}`, type: 'workspace' }) + } } [_loadDeps] () { @@ -764,10 +813,11 @@ class Node { const peerDependencies = {} const peerOptional = {} for (const [name, dep] of Object.entries(pd)) { - if (pm[name] && pm[name].optional) + if (pm[name] && pm[name].optional) { peerOptional[name] = dep - else + } else { peerDependencies[name] = dep + } } this[_loadDepType](peerDependencies, 'peer') this[_loadDepType](peerOptional, 'peerOptional') @@ -784,8 +834,9 @@ class Node { } = sourceReference || {} const thisDev = isTop && !globalTop && path const srcDev = !sourceReference || srcTop && !srcGlobalTop && srcPath - if (thisDev && srcDev) + if (thisDev && srcDev) { this[_loadDepType](this.package.devDependencies, 'dev') + } } [_loadDepType] (deps, type) { @@ -794,8 +845,9 @@ class Node { // prioritize a new edge over an existing one for (const [name, spec] of Object.entries(deps || {})) { const current = this.edgesOut.get(name) - if (!current || current.type !== 'workspace') + if (!current || current.type !== 'workspace') { new Edge({ from: this, name, spec, accept: ad[name], type }) + } } } @@ -803,25 +855,29 @@ class Node { const parent = this[_fsParent] /* istanbul ignore next - should be impossible */ debug(() => { - if (parent === this) + if (parent === this) { throw new Error('node set to its own fsParent') + } }) return parent } set fsParent (fsParent) { if (!fsParent) { - if (this[_fsParent]) + if (this[_fsParent]) { this.root = null + } return } debug(() => { - if (fsParent === this) + if (fsParent === this) { throw new Error('setting node to its own fsParent') + } - if (fsParent.realpath === this.realpath) + if (fsParent.realpath === this.realpath) { throw new Error('setting fsParent to same path') + } // the initial set MUST be an actual walk-up from the realpath // subsequent sets will re-root on the new fsParent's path. @@ -837,16 +893,19 @@ class Node { } }) - if (fsParent.isLink) + if (fsParent.isLink) { fsParent = fsParent.target + } // setting a thing to its own fsParent is not normal, but no-op for safety - if (this === fsParent || fsParent.realpath === this.realpath) + if (this === fsParent || fsParent.realpath === this.realpath) { return + } // nothing to do - if (this[_fsParent] === fsParent) + if (this[_fsParent] === fsParent) { return + } const oldFsParent = this[_fsParent] const newPath = !oldFsParent ? this.path @@ -874,11 +933,13 @@ class Node { } // update this.path/realpath for this and all children/fsChildren - if (pathChange) + if (pathChange) { this[_changePath](newPath) + } - if (oldParent) + if (oldParent) { oldParent[_reloadNamedEdges](oldName) + } // clobbers anything at that path, resets all appropriate references this.root = fsParent.root @@ -894,11 +955,13 @@ class Node { // will go ahead and create the invalid state, and then try to resolve // it with more tree construction, because it's a user request. 
canReplaceWith (node, ignorePeers = []) { - if (node.name !== this.name) + if (node.name !== this.name) { return false + } - if (node.packageName !== this.packageName) + if (node.packageName !== this.packageName) { return false + } ignorePeers = new Set(ignorePeers) @@ -915,12 +978,14 @@ class Node { edge.from.parent === this.parent && edge.peer && ignorePeers.has(edge.from.name) - if (ignored) + if (ignored) { continue + } // only care about edges that don't originate from this node - if (!depSet.has(edge.from) && !edge.satisfiedBy(node)) + if (!depSet.has(edge.from) && !edge.satisfiedBy(node)) { return false + } } return true @@ -935,41 +1000,49 @@ class Node { // to if it was removed, or nothing is depending on it in the first place. canDedupe (preferDedupe = false) { // not allowed to mess with shrinkwraps or bundles - if (this.inDepBundle || this.inShrinkwrap) + if (this.inDepBundle || this.inShrinkwrap) { return false + } // it's a top level pkg, or a dep of one - if (!this.resolveParent || !this.resolveParent.resolveParent) + if (!this.resolveParent || !this.resolveParent.resolveParent) { return false + } // no one wants it, remove it - if (this.edgesIn.size === 0) + if (this.edgesIn.size === 0) { return true + } const other = this.resolveParent.resolveParent.resolve(this.name) // nothing else, need this one - if (!other) + if (!other) { return false + } // if it's the same thing, then always fine to remove - if (other.matches(this)) + if (other.matches(this)) { return true + } // if the other thing can't replace this, then skip it - if (!other.canReplace(this)) + if (!other.canReplace(this)) { return false + } // if we prefer dedupe, or if the version is greater/equal, take the other - if (preferDedupe || semver.gte(other.version, this.version)) + if (preferDedupe || semver.gte(other.version, this.version)) { return true + } return false } satisfies (requested) { - if (requested instanceof Edge) + if (requested instanceof Edge) { return this.name === requested.name && requested.satisfiedBy(this) + } const parsed = npa(requested) const { name = this.name, rawSpec: spec } = parsed @@ -983,29 +1056,35 @@ class Node { matches (node) { // if the nodes are literally the same object, obviously a match. - if (node === this) + if (node === this) { return true + } // if the names don't match, they're different things, even if // the package contents are identical. - if (node.name !== this.name) + if (node.name !== this.name) { return false + } // if they're links, they match if the targets match - if (this.isLink) + if (this.isLink) { return node.isLink && this.target.matches(node.target) + } // if they're two project root nodes, they're different if the paths differ - if (this.isProjectRoot && node.isProjectRoot) + if (this.isProjectRoot && node.isProjectRoot) { return this.path === node.path + } // if the integrity matches, then they're the same. - if (this.integrity && node.integrity) + if (this.integrity && node.integrity) { return this.integrity === node.integrity + } // if no integrity, check resolved - if (this.resolved && node.resolved) + if (this.resolved && node.resolved) { return this.resolved === node.resolved + } // if no resolved, check both package name and version // otherwise, conclude that they are different things @@ -1031,39 +1110,44 @@ class Node { // parent's children map, and leave it at that. 
const nameMatch = node.parent && node.parent.children.get(this.name) === node - if (nameMatch) + if (nameMatch) { this.path = resolve(node.parent.path, 'node_modules', this.name) - else { + } else { this.path = node.path this.name = node.name } - if (!this.isLink) + if (!this.isLink) { this.realpath = this.path + } this[_refreshLocation]() // keep children when a node replaces another if (!this.isLink) { - for (const kid of node.children.values()) + for (const kid of node.children.values()) { kid.parent = this + } } - if (!node.isRoot) + if (!node.isRoot) { this.root = node.root + } treeCheck(this) } get inShrinkwrap () { - return this.parent && (this.parent.hasShrinkwrap || this.parent.inShrinkwrap) + return this.parent && + (this.parent.hasShrinkwrap || this.parent.inShrinkwrap) } get parent () { const parent = this[_parent] /* istanbul ignore next - should be impossible */ debug(() => { - if (parent === this) + if (parent === this) { throw new Error('node set to its own parent') + } }) return parent } @@ -1083,23 +1167,27 @@ class Node { if (!parent) { // but only delete it if we actually had a parent in the first place // otherwise it's just setting to null when it's already null - if (this[_parent]) + if (this[_parent]) { this.root = null + } return } - if (parent.isLink) + if (parent.isLink) { parent = parent.target + } // setting a thing to its own parent is not normal, but no-op for safety - if (this === parent) + if (this === parent) { return + } const oldParent = this[_parent] // nothing to do - if (oldParent === parent) + if (oldParent === parent) { return + } // ok now we know something is actually changing, and parent is not a link const newPath = resolve(parent.path, 'node_modules', this.name) @@ -1116,8 +1204,9 @@ class Node { } // update this.path/realpath for this and all children/fsChildren - if (pathChange) + if (pathChange) { this[_changePath](newPath) + } // clobbers anything at that path, resets all appropriate references this.root = parent.root @@ -1127,16 +1216,19 @@ class Node { // Removes the node from its root the metadata and inventory. 
[_delistFromMeta] () { const root = this.root - if (!root.realpath || !this.path) + if (!root.realpath || !this.path) { return + } root.inventory.delete(this) root.tops.delete(this) - if (root.meta) + if (root.meta) { root.meta.delete(this.path) + } /* istanbul ignore next - should be impossible */ debug(() => { - if ([...root.inventory.values()].includes(this)) + if ([...root.inventory.values()].includes(this)) { throw new Error('failed to delist') + } }) } @@ -1148,8 +1240,9 @@ class Node { this.path = newPath const namePattern = /(?:^|\/|\\)node_modules[\\/](@[^/\\]+[\\/][^\\/]+|[^\\/]+)$/ const nameChange = newPath.match(namePattern) - if (nameChange && this.name !== nameChange[1]) + if (nameChange && this.name !== nameChange[1]) { this.name = nameChange[1].replace(/\\/g, '/') + } // if we move a link target, update link realpaths if (!this.isLink) { @@ -1161,10 +1254,12 @@ class Node { } } // if we move /x to /y, then a module at /x/a/b becomes /y/a/b - for (const child of this.fsChildren) + for (const child of this.fsChildren) { child[_changePath](resolve(newPath, relative(oldPath, child.path))) - for (const [name, child] of this.children.entries()) + } + for (const [name, child] of this.children.entries()) { child[_changePath](resolve(newPath, 'node_modules', name)) + } this[_refreshLocation]() } @@ -1179,8 +1274,9 @@ class Node { this.location = loc root.inventory.add(this) - if (root.meta) + if (root.meta) { root.meta.add(this) + } } addEdgeOut (edge) { @@ -1191,8 +1287,9 @@ class Node { this.edgesIn.add(edge) // try to get metadata from the yarn.lock file - if (this.root.meta) + if (this.root.meta) { this.root.meta.addEdge(edge) + } } [_reloadNamedEdges] (name, rootLoc = this.location) { @@ -1202,13 +1299,16 @@ class Node { edge.to.location === `${rootLoc}/node_modules/${edge.name}` const sameResolved = edge && this.resolve(name) === edge.to const recheck = rootLocResolved || !sameResolved - if (edge && recheck) + if (edge && recheck) { edge.reload(true) - for (const c of this.children.values()) + } + for (const c of this.children.values()) { c[_reloadNamedEdges](name, rootLoc) + } - for (const c of this.fsChildren) + for (const c of this.fsChildren) { c[_reloadNamedEdges](name, rootLoc) + } } get isLink () { @@ -1255,15 +1355,18 @@ class Node { /* istanbul ignore next - should be impossible, * but I keep doing this mistake in tests */ debug(() => { - if (typeof name !== 'string' || !name) + if (typeof name !== 'string' || !name) { throw new Error('non-string passed to Node.resolve') + } }) const mine = this.children.get(name) - if (mine) + if (mine) { return mine + } const resolveParent = this.resolveParent - if (resolveParent) + if (resolveParent) { return resolveParent.resolve(name) + } return null } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/optional-set.js b/deps/npm/node_modules/@npmcli/arborist/lib/optional-set.js index 9472158bc44be9..9f5184ea024420 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/optional-set.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/optional-set.js @@ -10,8 +10,9 @@ const gatherDepSet = require('./gather-dep-set.js') const optionalSet = node => { - if (!node.optional) + if (!node.optional) { return new Set() + } // start with the node, then walk up the dependency graph until we // get to the boundaries that define the optional set. 
since the @@ -21,8 +22,9 @@ const optionalSet = node => { const set = new Set([node]) for (const node of set) { for (const edge of node.edgesIn) { - if (!edge.optional) + if (!edge.optional) { set.add(edge.from) + } } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/peer-entry-sets.js b/deps/npm/node_modules/@npmcli/arborist/lib/peer-entry-sets.js index 11f9a431607ec0..2c4322ee678ca5 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/peer-entry-sets.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/peer-entry-sets.js @@ -15,12 +15,14 @@ const peerEntrySets = node => { const unionSet = new Set([node]) for (const node of unionSet) { for (const edge of node.edgesOut.values()) { - if (edge.valid && edge.peer && edge.to) + if (edge.valid && edge.peer && edge.to) { unionSet.add(edge.to) + } } for (const edge of node.edgesIn) { - if (edge.valid && edge.peer) + if (edge.valid && edge.peer) { unionSet.add(edge.from) + } } } const entrySets = new Map() @@ -28,16 +30,18 @@ const peerEntrySets = node => { for (const edge of peer.edgesIn) { // if not valid, it doesn't matter anyway. either it's been previously // overridden, or it's the thing we're interested in replacing. - if (!edge.valid) + if (!edge.valid) { continue + } // this is the entry point into the peer set if (!edge.peer || edge.from.isTop) { // get the subset of peer brought in by this peer entry edge const sub = new Set([peer]) for (const peer of sub) { for (const edge of peer.edgesOut.values()) { - if (edge.valid && edge.peer && edge.to) + if (edge.valid && edge.peer && edge.to) { sub.add(edge.to) + } } } // if this subset does not include the node we are focused on, @@ -60,8 +64,9 @@ const peerEntrySets = node => { // Edge(a->b) => Set(b, d, e, f, g) // Edge(a->d) => Set(d, e, f, g) // } - if (sub.has(node)) + if (sub.has(node)) { entrySets.set(edge, sub) + } } } } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js b/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js index c0023e74ad8eae..d7cc7d935afc8d 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/place-dep.js @@ -85,8 +85,9 @@ class PlaceDep { !edge.error && !explicitRequest && !updateNames.includes(edge.name) && - !this.isVulnerable(edge.to)) + !this.isVulnerable(edge.to)) { return + } // walk up the tree until we hit either a top/root node, or a place // where the dep is not a peer dep. @@ -110,8 +111,9 @@ class PlaceDep { // but we CAN place it under a, so the correct thing to do is keep // walking up the tree. const targetEdge = target.edgesOut.get(edge.name) - if (!target.isTop && targetEdge && targetEdge.peer) + if (!target.isTop && targetEdge && targetEdge.peer) { continue + } const cpd = new CanPlaceDep({ dep, @@ -141,34 +143,39 @@ class PlaceDep { // should treat (b) and (d) as OK, and place them in the last place // where they did not themselves conflict, and skip c@2 if conflict // is ok by virtue of being forced or not ours and not strict. - if (cpd.canPlaceSelf !== CONFLICT) + if (cpd.canPlaceSelf !== CONFLICT) { canPlaceSelf = cpd + } // we found a place this can go, along with all its peer friends. // we break when we get the first conflict - if (cpd.canPlace !== CONFLICT) + if (cpd.canPlace !== CONFLICT) { canPlace = cpd - else + } else { break + } // if it's a load failure, just plop it in the first place attempted, // since we're going to crash the build or prune it out anyway. 
// but, this will frequently NOT be a successful canPlace, because // it'll have no version or other information. - if (dep.errors.length) + if (dep.errors.length) { break + } // nest packages like npm v1 and v2 // very disk-inefficient - if (legacyBundling) + if (legacyBundling) { break + } // when installing globally, or just in global style, we never place // deps above the first level. if (globalStyle) { const rp = target.resolveParent - if (rp && rp.isProjectRoot) + if (rp && rp.isProjectRoot) { break + } } } @@ -183,8 +190,9 @@ class PlaceDep { if (!canPlace) { // if not forced, or it's our dep, or strictPeerDeps is set, then // this is an ERESOLVE error. - if (!this.conflictOk) + if (!this.conflictOk) { return this.failPeerConflict() + } // ok! we're gonna allow the conflict, but we should still warn // if we have a current, then we treat CONFLICT as a KEEP. @@ -237,8 +245,9 @@ class PlaceDep { // it's a conflict. Treat it as a KEEP, but warn and move on. if (placementType === KEEP) { // this was an overridden peer dep - if (edge.peer && !edge.valid) + if (edge.peer && !edge.valid) { this.warnPeerConflict() + } // if we get a KEEP in a update scenario, then we MAY have something // already duplicating this unnecessarily! For example: @@ -287,21 +296,24 @@ class PlaceDep { }) this.oldDep = target.children.get(this.name) - if (this.oldDep) + if (this.oldDep) { this.replaceOldDep() - else + } else { this.placed.parent = target + } // if it's an overridden peer dep, warn about it - if (edge.peer && !this.placed.satisfies(edge)) + if (edge.peer && !this.placed.satisfies(edge)) { this.warnPeerConflict() + } // If the edge is not an error, then we're updating something, and // MAY end up putting a better/identical node further up the tree in // a way that causes an unnecessary duplication. If so, remove the // now-unnecessary node. - if (edge.valid && edge.to && edge.to !== this.placed) + if (edge.valid && edge.to && edge.to !== this.placed) { this.pruneDedupable(edge.to, false) + } // in case we just made some duplicates that can be removed, // prune anything deeper in the tree that can be replaced by this @@ -310,8 +322,9 @@ class PlaceDep { this.pruneDedupable(node, false) // only walk the direct children of the ones we kept if (node.root === target.root) { - for (const kid of node.children.values()) + for (const kid of node.children.values()) { this.pruneDedupable(kid, false) + } } } } @@ -323,8 +336,9 @@ class PlaceDep { // otherwise they'd be gone and the peer set would change throughout // this loop. for (const peerEdge of this.placed.edgesOut.values()) { - if (peerEdge.valid || !peerEdge.peer || peerEdge.overridden) + if (peerEdge.valid || !peerEdge.peer || peerEdge.overridden) { continue + } const peer = virtualRoot.children.get(peerEdge.name) @@ -332,12 +346,14 @@ class PlaceDep { // it's an optional peer dep. If it's not being properly met (ie, // peerEdge.valid is false), then this is likely heading for an // ERESOLVE error, unless it can walk further up the tree. - if (!peer) + if (!peer) { continue + } // overridden peerEdge, just accept what's there already - if (!peer.satisfies(peerEdge)) + if (!peer.satisfies(peerEdge)) { continue + } this.children.push(new PlaceDep({ parent: this, @@ -363,8 +379,9 @@ class PlaceDep { // later anyway. 
const oldDeps = [] for (const [name, edge] of this.oldDep.edgesOut.entries()) { - if (!this.placed.edgesOut.has(name) && edge.to) + if (!this.placed.edgesOut.has(name) && edge.to) { oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to)) + } } this.placed.replace(this.oldDep) this.pruneForReplacement(this.placed, oldDeps) @@ -377,8 +394,9 @@ class PlaceDep { .filter(e => e.to && !e.valid).map(e => e.to)) for (const dep of oldDeps) { const set = gatherDepSet([dep], e => e.to !== dep && e.valid) - for (const dep of set) + for (const dep of set) { invalidDeps.add(dep) + } } // ignore dependency edges from the node being replaced, but @@ -388,8 +406,9 @@ class PlaceDep { edge.from !== node && edge.to !== node && edge.valid) // now just delete whatever's left, because it's junk - for (const dep of deps) + for (const dep of deps) { dep.root = null + } } // prune all the nodes in a branch of the tree that can be safely removed @@ -402,8 +421,9 @@ class PlaceDep { // the dep set, except for this node we're deduping, so that we // also prune deps that would be made extraneous. const deps = gatherDepSet([node], e => e.to !== node && e.valid) - for (const node of deps) + for (const node of deps) { node.root = null + } return } if (descend) { @@ -413,13 +433,15 @@ class PlaceDep { const nodeSort = (a, b) => a.location.localeCompare(b.location, 'en') const children = [...node.children.values()].sort(nodeSort) - for (const child of children) + for (const child of children) { this.pruneDedupable(child) + } const fsChildren = [...node.fsChildren].sort(nodeSort) for (const topNode of fsChildren) { const children = [...topNode.children.values()].sort(nodeSort) - for (const child of children) + for (const child of children) { this.pruneDedupable(child) + } } } } @@ -432,11 +454,13 @@ class PlaceDep { const { edge } = this.top const { from: node } = edge - if (node.isWorkspace || node.isProjectRoot) + if (node.isWorkspace || node.isProjectRoot) { return true + } - if (!edge.peer) + if (!edge.peer) { return false + } // re-entry case. check if any non-peer edges come from the project, // or any entryEdges on peer groups are from the root. 
@@ -446,13 +470,15 @@ class PlaceDep { hasPeerEdges = true continue } - if (edge.from.isWorkspace || edge.from.isProjectRoot) + if (edge.from.isWorkspace || edge.from.isProjectRoot) { return true + } } if (hasPeerEdges) { for (const edge of peerEntrySets(node).keys()) { - if (edge.from.isWorkspace || edge.from.isProjectRoot) + if (edge.from.isWorkspace || edge.from.isProjectRoot) { return true + } } } @@ -541,8 +567,9 @@ class PlaceDep { get allChildren () { const set = new Set(this.children) for (const child of set) { - for (const grandchild of child.children) + for (const grandchild of child.children) { set.add(grandchild) + } } return [...set] } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/printable.js b/deps/npm/node_modules/@npmcli/arborist/lib/printable.js index 4aa2fffd104b4a..af24ccb9592883 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/printable.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/printable.js @@ -7,45 +7,62 @@ const relpath = require('./relpath.js') class ArboristNode { constructor (tree, path) { this.name = tree.name - if (tree.packageName && tree.packageName !== this.name) + if (tree.packageName && tree.packageName !== this.name) { this.packageName = tree.packageName - if (tree.version) + } + if (tree.version) { this.version = tree.version + } this.location = tree.location this.path = tree.path - if (tree.realpath !== this.path) + if (tree.realpath !== this.path) { this.realpath = tree.realpath - if (tree.resolved !== null) + } + if (tree.resolved !== null) { this.resolved = tree.resolved - if (tree.extraneous) + } + if (tree.extraneous) { this.extraneous = true - if (tree.dev) + } + if (tree.dev) { this.dev = true - if (tree.optional) + } + if (tree.optional) { this.optional = true - if (tree.devOptional && !tree.dev && !tree.optional) + } + if (tree.devOptional && !tree.dev && !tree.optional) { this.devOptional = true - if (tree.peer) + } + if (tree.peer) { this.peer = true - if (tree.inBundle) + } + if (tree.inBundle) { this.bundled = true - if (tree.inDepBundle) + } + if (tree.inDepBundle) { this.bundler = tree.getBundler().location - if (tree.isProjectRoot) + } + if (tree.isProjectRoot) { this.isProjectRoot = true - if (tree.isWorkspace) + } + if (tree.isWorkspace) { this.isWorkspace = true + } const bd = tree.package && tree.package.bundleDependencies - if (bd && bd.length) + if (bd && bd.length) { this.bundleDependencies = bd - if (tree.inShrinkwrap) + } + if (tree.inShrinkwrap) { this.inShrinkwrap = true - else if (tree.hasShrinkwrap) + } else if (tree.hasShrinkwrap) { this.hasShrinkwrap = true - if (tree.error) + } + if (tree.error) { this.error = treeError(tree.error) - if (tree.errors && tree.errors.length) + } + if (tree.errors && tree.errors.length) { this.errors = tree.errors.map(treeError) + } // edgesOut sorted by name if (tree.edgesOut.size) { @@ -109,10 +126,12 @@ class Edge { this.type = edge.type this.name = edge.name this.spec = edge.spec || '*' - if (edge.error) + if (edge.error) { this.error = edge.error - if (edge.overridden) + } + if (edge.overridden) { this.overridden = edge.overridden + } } } @@ -151,8 +170,9 @@ class EdgeIn extends Edge { } const printableTree = (tree, path = []) => { - if (!tree) + if (!tree) { return tree + } const Cls = tree.isLink ? ArboristLink : tree.sourceReference ? 
ArboristVirtualNode diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/realpath.js b/deps/npm/node_modules/@npmcli/arborist/lib/realpath.js index fa467c097a60b7..bc4bbbce384850 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/realpath.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/realpath.js @@ -14,18 +14,21 @@ const { resolve, basename, dirname } = require('path') const realpathCached = (path, rpcache, stcache, depth) => { // just a safety against extremely deep eloops /* istanbul ignore next */ - if (depth > 2000) + if (depth > 2000) { throw eloop(path) + } path = resolve(path) - if (rpcache.has(path)) + if (rpcache.has(path)) { return Promise.resolve(rpcache.get(path)) + } const dir = dirname(path) const base = basename(path) - if (base && rpcache.has(dir)) + if (base && rpcache.has(dir)) { return realpathChild(dir, base, rpcache, stcache, depth) + } // if it's the root, then we know it's real if (!base) { @@ -40,8 +43,9 @@ const realpathCached = (path, rpcache, stcache, depth) => { } const lstatCached = (path, stcache) => { - if (stcache.has(path)) + if (stcache.has(path)) { return Promise.resolve(stcache.get(path)) + } const p = lstat(path).then(st => { stcache.set(path, st) @@ -66,8 +70,9 @@ const realpathChild = (dir, base, rpcache, stcache, depth) => { const realdir = rpcache.get(dir) // that unpossible /* istanbul ignore next */ - if (typeof realdir === 'undefined') + if (typeof realdir === 'undefined') { throw new Error('in realpathChild without parent being in realpath cache') + } const realish = resolve(realdir, base) return lstatCached(realish, stcache).then(st => { @@ -78,8 +83,9 @@ const realpathChild = (dir, base, rpcache, stcache, depth) => { return readlink(realish).then(target => { const resolved = resolve(realdir, target) - if (realish === resolved) + if (realish === resolved) { throw eloop(realish) + } return realpathCached(resolved, rpcache, stcache, depth + 1) }).then(real => { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js index 83cb1f66f3a105..6e7e0e31f166bf 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -47,8 +47,9 @@ const readlink = promisify(fs.readlink) const lstat = promisify(fs.lstat) /* istanbul ignore next - version specific polyfill */ const readdir = async (path, opt) => { - if (!opt || !opt.withFileTypes) + if (!opt || !opt.withFileTypes) { return readdir_(path, opt) + } const ents = await readdir_(path, opt) if (typeof ents[0] === 'string') { return Promise.all(ents.map(async ent => { @@ -97,20 +98,22 @@ const consistentResolve = require('./consistent-resolve.js') const maybeReadFile = file => { return readFile(file, 'utf8').then(d => d, er => { /* istanbul ignore else - can't test without breaking module itself */ - if (er.code === 'ENOENT') + if (er.code === 'ENOENT') { return '' - else + } else { throw er + } }) } const maybeStatFile = file => { return stat(file).then(st => st.isFile(), er => { /* istanbul ignore else - can't test without breaking module itself */ - if (er.code === 'ENOENT') + if (er.code === 'ENOENT') { return null - else + } else { throw er + } }) } @@ -163,13 +166,16 @@ const assertNoNewer = async (path, data, lockTime, dir = path, seen = null) => { const rel = relpath(path, dir) if (dir !== path) { const dirTime = (await stat(dir)).mtime - if (dirTime > lockTime) + if (dirTime > lockTime) { throw 'out of date, updated: ' + rel - if (!isScope 
&& !isNM && !data.packages[rel]) + } + if (!isScope && !isNM && !data.packages[rel]) { throw 'missing from lockfile: ' + rel + } seen.add(rel) - } else + } else { seen = new Set([rel]) + } const parent = isParent ? dir : resolve(dir, 'node_modules') const children = dir === path @@ -179,26 +185,29 @@ const assertNoNewer = async (path, data, lockTime, dir = path, seen = null) => { return children.catch(() => []) .then(ents => Promise.all(ents.map(async ent => { const child = resolve(parent, ent.name) - if (ent.isDirectory() && !/^\./.test(ent.name)) + if (ent.isDirectory() && !/^\./.test(ent.name)) { await assertNoNewer(path, data, lockTime, child, seen) - else if (ent.isSymbolicLink()) { + } else if (ent.isSymbolicLink()) { const target = resolve(parent, await readlink(child)) const tstat = await stat(target).catch( /* istanbul ignore next - windows */ () => null) seen.add(relpath(path, child)) /* istanbul ignore next - windows cannot do this */ - if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) + if (tstat && tstat.isDirectory() && !seen.has(relpath(path, target))) { await assertNoNewer(path, data, lockTime, target, seen) + } } }))) .then(() => { - if (dir !== path) + if (dir !== path) { return + } // assert that all the entries in the lockfile were seen for (const loc of new Set(Object.keys(data.packages))) { - if (!seen.has(loc)) + if (!seen.has(loc)) { throw 'missing from node_modules: ' + loc + } } }) } @@ -252,39 +261,48 @@ class Shrinkwrap { const meta = {} pkgMetaKeys.forEach(key => { const val = metaFieldFromPkg(node.package, key) - if (val) + if (val) { meta[key.replace(/^_/, '')] = val + } }) // we only include name if different from the node path name, and for the // root to help prevent churn based on the name of the directory the // project is in const pname = node.packageName - if (pname && (node === node.root || pname !== node.name)) + if (pname && (node === node.root || pname !== node.name)) { meta.name = pname + } - if (node.isTop && node.package.devDependencies) + if (node.isTop && node.package.devDependencies) { meta.devDependencies = node.package.devDependencies + } nodeMetaKeys.forEach(key => { - if (node[key]) + if (node[key]) { meta[key] = node[key] + } }) const resolved = consistentResolve(node.resolved, node.path, path, true) - if (resolved) + if (resolved) { meta.resolved = resolved + } - if (node.extraneous) + if (node.extraneous) { meta.extraneous = true - else { - if (node.peer) + } else { + if (node.peer) { meta.peer = true - if (node.dev) + } + if (node.dev) { meta.dev = true - if (node.optional) + } + if (node.optional) { meta.optional = true - if (node.devOptional && !node.dev && !node.optional) + } + if (node.devOptional && !node.dev && !node.optional) { meta.devOptional = true + } } return meta } @@ -423,8 +441,9 @@ class Shrinkwrap { this.indent = indent !== undefined ? indent : this.indent this.newline = newline !== undefined ? newline : this.newline - if (!this.hiddenLockfile || !data.packages) + if (!this.hiddenLockfile || !data.packages) { return data + } // add a few ms just to account for jitter const lockTime = +(await stat(this.filename)).mtime + 10 @@ -467,8 +486,9 @@ class Shrinkwrap { // migrate a v1 package lock to the new format. 
const meta = this[_metaFromLock](location, name, lock) // dependencies nested under a link are actually under the link target - if (meta.link) + if (meta.link) { location = meta.resolved + } if (lock.dependencies) { for (const [name, dep] of Object.entries(lock.dependencies)) { const loc = location + (location ? '/' : '') + 'node_modules/' + name @@ -488,13 +508,15 @@ class Shrinkwrap { pkgMetaKeys.forEach(key => { const val = metaFieldFromPkg(pkg, key) const k = key.replace(/^_/, '') - if (val) + if (val) { root[k] = val + } }) for (const [loc, meta] of Object.entries(this.data.packages)) { - if (!meta.requires || !loc) + if (!meta.requires || !loc) { continue + } // resolve each require to a meta entry // if this node isn't optional, but the dep is, then it's an optionalDep @@ -523,27 +545,33 @@ class Shrinkwrap { [_resolveMetaNode] (loc, name) { for (let path = loc; true; path = path.replace(/(^|\/)[^/]*$/, '')) { const check = `${path}${path ? '/' : ''}node_modules/${name}` - if (this.data.packages[check]) + if (this.data.packages[check]) { return this.data.packages[check] + } - if (!path) + if (!path) { break + } } return null } [_lockFromLoc] (lock, path, i = 0) { - if (!lock) + if (!lock) { return null + } - if (path[i] === '') + if (path[i] === '') { i++ + } - if (i >= path.length) + if (i >= path.length) { return lock + } - if (!lock.dependencies) + if (!lock.dependencies) { return null + } return this[_lockFromLoc](lock.dependencies[path[i]], path, i + 1) } @@ -555,8 +583,9 @@ class Shrinkwrap { } delete (nodePath) { - if (!this.data) + if (!this.data) { throw new Error('run load() before getting or setting data') + } const location = this[_pathToLoc](nodePath) this[_awaitingUpdate].delete(location) @@ -564,22 +593,26 @@ class Shrinkwrap { const path = location.split(/(?:^|\/)node_modules\//) const name = path.pop() const pLock = this[_lockFromLoc](this.data, path) - if (pLock && pLock.dependencies) + if (pLock && pLock.dependencies) { delete pLock.dependencies[name] + } } get (nodePath) { - if (!this.data) + if (!this.data) { throw new Error('run load() before getting or setting data') + } const location = this[_pathToLoc](nodePath) - if (this[_awaitingUpdate].has(location)) + if (this[_awaitingUpdate].has(location)) { this[_updateWaitingNode](location) + } // first try to get from the newer spot, which we know has // all the things we need. - if (this.data.packages[location]) + if (this.data.packages[location]) { return this.data.packages[location] + } // otherwise, fall back to the legacy metadata, and hope for the best // get the node in the shrinkwrap corresponding to this spot @@ -595,8 +628,9 @@ class Shrinkwrap { // from a lockfile which may be outdated or incomplete. Since v1 // lockfiles used the "version" field to contain a variety of // different possible types of data, this gets a little complicated. - if (!lock) + if (!lock) { return {} + } // try to figure out a npm-package-arg spec from the lockfile entry // This will return null if we could not get anything valid out of it. @@ -613,29 +647,35 @@ class Shrinkwrap { } // also save the link target, omitting version since we don't know // what it is, but we know it isn't a link to itself! 
- if (!this.data.packages[target]) + if (!this.data.packages[target]) { this[_metaFromLock](target, name, { ...lock, version: null }) + } return this.data.packages[location] } const meta = {} // when calling loadAll we'll change these into proper dep objects - if (lock.requires && typeof lock.requires === 'object') + if (lock.requires && typeof lock.requires === 'object') { meta.requires = lock.requires + } - if (lock.optional) + if (lock.optional) { meta.optional = true - if (lock.dev) + } + if (lock.dev) { meta.dev = true + } // the root will typically have a name from the root project's // package.json file. - if (location === '') + if (location === '') { meta.name = lock.name + } // if we have integrity, save it now. - if (lock.integrity) + if (lock.integrity) { meta.integrity = lock.integrity + } if (lock.version && !lock.integrity) { // this is usually going to be a git url or symlink, but it could @@ -668,12 +708,13 @@ class Shrinkwrap { // have a fetchSpec equal to the fully resolved thing. // Registry deps, we take what's in the lockfile. if (lock.resolved || (spec.type && !spec.registry)) { - if (spec.registry) + if (spec.registry) { meta.resolved = lock.resolved - else if (spec.type === 'file') + } else if (spec.type === 'file') { meta.resolved = consistentResolve(spec, this.path, this.path, true) - else if (spec.fetchSpec) + } else if (spec.fetchSpec) { meta.resolved = spec.fetchSpec + } } // at this point, if still we don't have a version, do our best to @@ -685,32 +726,37 @@ class Shrinkwrap { versionFromTgz(spec.name, meta.resolved) if (fromTgz) { meta.version = fromTgz.version - if (fromTgz.name !== name) + if (fromTgz.name !== name) { meta.name = fromTgz.name + } } } else if (spec.type === 'alias') { meta.name = spec.subSpec.name meta.version = spec.subSpec.fetchSpec - } else if (spec.type === 'version') + } else if (spec.type === 'version') { meta.version = spec.fetchSpec + } // ok, I did my best! good luck! } - if (lock.bundled) + if (lock.bundled) { meta.inBundle = true + } // save it for next time return this.data.packages[location] = meta } add (node) { - if (!this.data) + if (!this.data) { throw new Error('run load() before getting or setting data') + } // will be actually updated on read const loc = relpath(this.path, node.path) - if (node.path === this.path) + if (node.path === this.path) { this.tree = node + } // if we have metadata about this node, and it's a match, then // try to decorate it. @@ -758,18 +804,21 @@ class Shrinkwrap { } addEdge (edge) { - if (!this.yarnLock || !edge.valid) + if (!this.yarnLock || !edge.valid) { return + } const { to: node } = edge // if it's already set up, nothing to do - if (node.resolved !== null && node.integrity !== null) + if (node.resolved !== null && node.integrity !== null) { return + } // if the yarn lock is empty, nothing to do - if (!this.yarnLock.entries || !this.yarnLock.entries.size) + if (!this.yarnLock.entries || !this.yarnLock.entries.size) { return + } // we relativize the path here because that's how it shows up in the lock // XXX how is this different from pathFixed above?? 
@@ -783,11 +832,13 @@ class Shrinkwrap { if (!entry || mismatch(node.version, entry.version) || mismatch(node.integrity, entry.integrity) || - mismatch(pathFixed, entry.resolved)) + mismatch(pathFixed, entry.resolved)) { return + } - if (entry.resolved && yarnRegRe.test(entry.resolved) && spec.registry) + if (entry.resolved && yarnRegRe.test(entry.resolved) && spec.registry) { entry.resolved = entry.resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') + } node.integrity = node.integrity || entry.integrity || null node.resolved = node.resolved || @@ -804,30 +855,35 @@ class Shrinkwrap { commit () { if (this.tree) { - if (this.yarnLock) + if (this.yarnLock) { this.yarnLock.fromTree(this.tree) + } const root = Shrinkwrap.metaFromNode(this.tree.target, this.path) this.data.packages = {} - if (Object.keys(root).length) + if (Object.keys(root).length) { this.data.packages[''] = root + } for (const node of this.tree.root.inventory.values()) { // only way this.tree is not root is if the root is a link to it - if (node === this.tree || node.isRoot || node.location === '') + if (node === this.tree || node.isRoot || node.location === '') { continue + } const loc = relpath(this.path, node.path) this.data.packages[loc] = Shrinkwrap.metaFromNode(node, this.path) } } else if (this[_awaitingUpdate].size > 0) { - for (const loc of this[_awaitingUpdate].keys()) + for (const loc of this[_awaitingUpdate].keys()) { this[_updateWaitingNode](loc) + } } // hidden lockfiles don't include legacy metadata or a root entry if (this.hiddenLockfile) { delete this.data.packages[''] delete this.data.dependencies - } else if (this.tree) + } else if (this.tree) { this[_buildLegacyLockfile](this.tree, this.data) + } return this.data } @@ -836,8 +892,9 @@ class Shrinkwrap { if (node === this.tree) { // the root node lock.name = node.packageName || node.name - if (node.version) + if (node.version) { lock.version = node.version + } } // npm v6 and before tracked 'from', meaning "the request that led @@ -868,26 +925,29 @@ class Shrinkwrap { const spec = !edge ? rSpec : npa.resolve(node.name, edge.spec, edge.from.realpath) - if (node.isLink) + if (node.isLink) { lock.version = `file:${relpath(this.path, node.realpath)}` - else if (spec && (spec.type === 'file' || spec.type === 'remote')) + } else if (spec && (spec.type === 'file' || spec.type === 'remote')) { lock.version = spec.saveSpec - else if (spec && spec.type === 'git' || rSpec.type === 'git') { + } else if (spec && spec.type === 'git' || rSpec.type === 'git') { lock.version = node.resolved /* istanbul ignore else - don't think there are any cases where a git * spec (or indeed, ANY npa spec) doesn't have a .raw member */ - if (spec.raw) + if (spec.raw) { lock.from = spec.raw + } } else if (!node.isRoot && node.package && node.packageName && - node.packageName !== node.name) + node.packageName !== node.name) { lock.version = `npm:${node.packageName}@${node.version}` - else if (node.package && node.version) + } else if (node.package && node.version) { lock.version = node.version + } - if (node.inDepBundle) + if (node.inDepBundle) { lock.bundled = true + } // when we didn't resolve to git, file, or dir, and didn't request // git, file, dir, or remote, then the resolved value is necessary. 
@@ -899,77 +959,90 @@ class Shrinkwrap { spec.type !== 'directory' && spec.type !== 'git' && spec.type !== 'file' && - spec.type !== 'remote') + spec.type !== 'remote') { lock.resolved = node.resolved + } - if (node.integrity) + if (node.integrity) { lock.integrity = node.integrity + } - if (node.extraneous) + if (node.extraneous) { lock.extraneous = true - else if (!node.isLink) { - if (node.peer) + } else if (!node.isLink) { + if (node.peer) { lock.peer = true + } - if (node.devOptional && !node.dev && !node.optional) + if (node.devOptional && !node.dev && !node.optional) { lock.devOptional = true + } - if (node.dev) + if (node.dev) { lock.dev = true + } - if (node.optional) + if (node.optional) { lock.optional = true + } } const depender = node.target if (depender.edgesOut.size > 0) { if (node !== this.tree) { - lock.requires = [...depender.edgesOut.entries()].reduce((set, [k, v]) => { + const entries = [...depender.edgesOut.entries()] + lock.requires = entries.reduce((set, [k, v]) => { // omit peer deps from legacy lockfile requires field, because // npm v6 doesn't handle peer deps, and this triggers some bad // behavior if the dep can't be found in the dependencies list. const { spec, peer } = v - if (peer) + if (peer) { return set + } if (spec.startsWith('file:')) { // turn absolute file: paths into relative paths from the node // this especially shows up with workspace edges when the root // node is also a workspace in the set. const p = resolve(node.realpath, spec.substr('file:'.length)) set[k] = `file:${relpath(node.realpath, p)}` - } else + } else { set[k] = spec + } return set }, {}) - } else + } else { lock.requires = true + } } // now we walk the children, putting them in the 'dependencies' object const {children} = node.target - if (!children.size) + if (!children.size) { delete lock.dependencies - else { + } else { const kidPath = [...path, node.realpath] const dependencies = {} // skip any that are already in the descent path, so cyclical link // dependencies don't blow up with ELOOP. let found = false for (const [name, kid] of children.entries()) { - if (path.includes(kid.realpath)) + if (path.includes(kid.realpath)) { continue + } dependencies[name] = this[_buildLegacyLockfile](kid, {}, kidPath) found = true } - if (found) + if (found) { lock.dependencies = dependencies + } } return lock } save (options = {}) { - if (!this.data) + if (!this.data) { throw new Error('run load() before saving data') + } const { format = true } = options const defaultIndent = this.indent || 2 diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/signal-handling.js b/deps/npm/node_modules/@npmcli/arborist/lib/signal-handling.js index 1051cd593970a3..0afbb05dcfc641 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/signal-handling.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/signal-handling.js @@ -13,8 +13,9 @@ const setup = fn => { const sigListeners = { loaded: false } const unload = () => { - if (!sigListeners.loaded) + if (!sigListeners.loaded) { return + } for (const sig of signals) { try { process.removeListener(sig, sigListeners[sig]) @@ -43,8 +44,9 @@ const setup = fn => { // if we exit normally, but caught a signal which would have been fatal, // then re-send it once we're done with whatever cleanup we have to do. 
unload() - if (process.listeners(sig).length < 1) + if (process.listeners(sig).length < 1) { process.once('beforeExit', onBeforeExit) + } fn({ signal: sig }) } @@ -56,8 +58,9 @@ const setup = fn => { try { // if we call this a bunch of times, avoid triggering the warning const { length } = process.listeners(sig) - if (length >= max) + if (length >= max) { process.setMaxListeners(length + 1) + } process.on(sig, sigListeners[sig]) } catch (er) {} } diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/spec-from-lock.js b/deps/npm/node_modules/@npmcli/arborist/lib/spec-from-lock.js index eccf472a96a800..789741976269d3 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/spec-from-lock.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/spec-from-lock.js @@ -5,19 +5,22 @@ const specFromLock = (name, lock, where) => { try { if (lock.version) { const spec = npa.resolve(name, lock.version, where) - if (lock.integrity || spec.type === 'git') + if (lock.integrity || spec.type === 'git') { return spec + } } if (lock.from) { // legacy metadata includes "from", but not integrity const spec = npa.resolve(name, lock.from, where) - if (spec.registry && lock.version) + if (spec.registry && lock.version) { return npa.resolve(name, lock.version, where) - else if (!lock.resolved) + } else if (!lock.resolved) { return spec + } } - if (lock.resolved) + if (lock.resolved) { return npa.resolve(name, lock.resolved, where) + } } catch (_) { } try { return npa.resolve(name, lock.version, where) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/tracker.js b/deps/npm/node_modules/@npmcli/arborist/lib/tracker.js index aefd5fe1bbf58a..b50f06eaa55189 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/tracker.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/tracker.js @@ -11,35 +11,37 @@ module.exports = cls => class Tracker extends cls { addTracker (section, subsection = null, key = null) { // TrackerGroup type object not found - if (!this.log.newGroup) + if (!this.log.newGroup) { return + } - if (section === null || section === undefined) + if (section === null || section === undefined) { this[_onError](`Tracker can't be null or undefined`) + } - if (key === null) + if (key === null) { key = subsection + } const hasTracker = this[_progress].has(section) const hasSubtracker = this[_progress].has(`${section}:${key}`) - if (hasTracker && subsection === null) + if (hasTracker && subsection === null) { // 0. existing tracker, no subsection this[_onError](`Tracker "${section}" already exists`) - - else if (!hasTracker && subsection === null) { + } else if (!hasTracker && subsection === null) { // 1. no existing tracker, no subsection // Create a new tracker from this.log // starts progress bar - if (this[_progress].size === 0) + if (this[_progress].size === 0) { this.log.enableProgress() + } this[_progress].set(section, this.log.newGroup(section)) - } else if (!hasTracker && subsection !== null) + } else if (!hasTracker && subsection !== null) { // 2. no parent tracker and subsection this[_onError](`Parent tracker "${section}" does not exist`) - - else if (!hasTracker || !hasSubtracker) { + } else if (!hasTracker || !hasSubtracker) { // 3. 
existing parent tracker, no subsection tracker // Create a new subtracker in this[_progress] from parent tracker this[_progress].set(`${section}:${key}`, @@ -52,14 +54,17 @@ module.exports = cls => class Tracker extends cls { finishTracker (section, subsection = null, key = null) { // TrackerGroup type object not found - if (!this.log.newGroup) + if (!this.log.newGroup) { return + } - if (section === null || section === undefined) + if (section === null || section === undefined) { this[_onError](`Tracker can't be null or undefined`) + } - if (key === null) + if (key === null) { key = subsection + } const hasTracker = this[_progress].has(section) const hasSubtracker = this[_progress].has(`${section}:${key}`) @@ -71,8 +76,9 @@ module.exports = cls => class Tracker extends cls { // not have any remaining children const keys = this[_progress].keys() for (const key of keys) { - if (key.match(new RegExp(section + ':'))) + if (key.match(new RegExp(section + ':'))) { this.finishTracker(section, key) + } } // remove parent tracker @@ -81,13 +87,13 @@ module.exports = cls => class Tracker extends cls { // remove progress bar if all // trackers are finished - if (this[_progress].size === 0) + if (this[_progress].size === 0) { this.log.disableProgress() - } else if (!hasTracker && subsection === null) + } + } else if (!hasTracker && subsection === null) { // 1. no existing parent tracker, no subsection this[_onError](`Tracker "${section}" does not exist`) - - else if (!hasTracker || hasSubtracker) { + } else if (!hasTracker || hasSubtracker) { // 2. subtracker exists // Finish subtracker and remove from this[_progress] this[_progress].get(`${section}:${key}`).finish() diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js b/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js index a7e8d9c0142132..44b5484c68240c 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/tree-check.js @@ -5,8 +5,9 @@ const checkTree = (tree, checkUnreachable = true) => { // this can only happen in tests where we have a "tree" object // that isn't actually a tree. 
- if (!tree.root || !tree.root.inventory) + if (!tree.root || !tree.root.inventory) { return tree + } const { inventory } = tree.root const seen = new Set() @@ -21,8 +22,9 @@ const checkTree = (tree, checkUnreachable = true) => { 'root=' + !!(node && node.isRoot), ]) - if (!node || seen.has(node) || node.then) + if (!node || seen.has(node) || node.then) { return + } seen.add(node) @@ -116,14 +118,18 @@ const checkTree = (tree, checkUnreachable = true) => { check(fsParent, node, 'fsParent') check(target, node, 'target') log.push(['CHILDREN', node.location, ...node.children.keys()]) - for (const kid of node.children.values()) + for (const kid of node.children.values()) { check(kid, node, 'children') - for (const kid of node.fsChildren) + } + for (const kid of node.fsChildren) { check(kid, node, 'fsChildren') - for (const link of node.linksIn) + } + for (const link of node.linksIn) { check(link, node, 'linksIn') - for (const top of node.tops) + } + for (const top of node.tops) { check(top, node, 'tops') + } log.push(['DONE', node.location]) } check(tree) diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/version-from-tgz.js b/deps/npm/node_modules/@npmcli/arborist/lib/version-from-tgz.js index 4b433ea636a8b9..d5d8f7345c9bb7 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/version-from-tgz.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/version-from-tgz.js @@ -4,8 +4,9 @@ const {basename} = require('path') const {parse} = require('url') module.exports = (name, tgz) => { const base = basename(tgz) - if (!base.endsWith('.tgz')) + if (!base.endsWith('.tgz')) { return null + } const u = parse(tgz) if (/^https?:/.test(u.protocol)) { @@ -35,8 +36,9 @@ module.exports = (name, tgz) => { } const versionFromBaseScopeName = (base, scope, name) => { - if (!base.startsWith(name + '-')) + if (!base.startsWith(name + '-')) { return null + } const parsed = semver.parse(base.substring(name.length + 1, base.length - 4)) return parsed ? { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/vuln.js b/deps/npm/node_modules/@npmcli/arborist/lib/vuln.js index 5b1d1dc1ab83d5..da44e7c34d63c5 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/vuln.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/vuln.js @@ -28,8 +28,9 @@ const severities = new Map([ [null, -1], ]) -for (const [name, val] of severities.entries()) +for (const [name, val] of severities.entries()) { severities.set(val, name) +} class Vuln { constructor ({ name, advisory }) { @@ -43,7 +44,7 @@ class Vuln { this[_simpleRange] = null this.nodes = new Set() // assume a fix is available unless it hits a top node - // that locks it in place, setting this to false or {isSemVerMajor, version}. + // that locks it in place, setting this false or {isSemVerMajor, version}. 
this[_fixAvailable] = true this.addAdvisory(advisory) this.packument = advisory.packument @@ -65,30 +66,35 @@ class Vuln { // - true: fix does not require -f for (const v of this.via) { // don't blow up on loops - if (v.fixAvailable === f) + if (v.fixAvailable === f) { continue + } - if (f === false) + if (f === false) { v.fixAvailable = f - else if (v.fixAvailable === true) + } else if (v.fixAvailable === true) { v.fixAvailable = f - else if (typeof f === 'object' && ( - typeof v.fixAvailable !== 'object' || !v.fixAvailable.isSemVerMajor)) + } else if (typeof f === 'object' && ( + typeof v.fixAvailable !== 'object' || !v.fixAvailable.isSemVerMajor)) { v.fixAvailable = f + } } } testSpec (spec) { const specObj = npa(spec) - if (!specObj.registry) + if (!specObj.registry) { return true + } - if (specObj.subSpec) + if (specObj.subSpec) { spec = specObj.subSpec.rawSpec + } for (const v of this.versions) { - if (satisfies(v, spec) && !satisfies(v, this.range, semverOpt)) + if (satisfies(v, spec) && !satisfies(v, this.range, semverOpt)) { return false + } } return true } @@ -135,14 +141,16 @@ class Vuln { this[_range] = null this[_simpleRange] = null // refresh severity - for (const advisory of this.advisories) + for (const advisory of this.advisories) { this.addAdvisory(advisory) + } // remove any effects that are no longer relevant const vias = new Set([...this.advisories].map(a => a.dependency)) for (const via of this.via) { - if (!vias.has(via.name)) + if (!vias.has(via.name)) { this.deleteVia(via) + } } } @@ -151,8 +159,9 @@ class Vuln { const sev = severities.get(advisory.severity) this[_range] = null this[_simpleRange] = null - if (sev > severities.get(this.severity)) + if (sev > severities.get(this.severity)) { this.severity = advisory.severity + } } get range () { @@ -161,8 +170,9 @@ class Vuln { } get simpleRange () { - if (this[_simpleRange] && this[_simpleRange] === this[_range]) + if (this[_simpleRange] && this[_simpleRange] === this[_range]) { return this[_simpleRange] + } const versions = [...this.advisories][0].versions const range = this.range @@ -171,12 +181,14 @@ class Vuln { } isVulnerable (node) { - if (this.nodes.has(node)) + if (this.nodes.has(node)) { return true + } const { version } = node.package - if (!version) + if (!version) { return false + } for (const v of this.advisories) { if (v.testVersion(version)) { diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js b/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js index e237cc5c6a4614..384ba447d72faa 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js @@ -82,13 +82,15 @@ class YarnLock { const linere = /([^\r\n]*)\r?\n/gm let match let lineNum = 0 - if (!/\n$/.test(data)) + if (!/\n$/.test(data)) { data += '\n' + } while (match = linere.exec(data)) { const line = match[1] lineNum++ - if (line.charAt(0) === '#') + if (line.charAt(0) === '#') { continue + } if (line === '') { this.endCurrent() continue @@ -117,8 +119,9 @@ class YarnLock { const metadata = this.splitQuoted(line.trimLeft(), ' ') if (metadata.length === 2) { // strip off the legacy shasum hashes - if (metadata[0] === 'resolved') + if (metadata[0] === 'resolved') { metadata[1] = metadata[1].replace(/#.*/, '') + } this.current[metadata[0]] = metadata[1] continue } @@ -141,9 +144,9 @@ class YarnLock { let o = 0 for (let i = 0; i < split.length; i++) { const chunk = split[i] - if (/^".*"$/.test(chunk)) + if (/^".*"$/.test(chunk)) { out[o++] = 
chunk.trim().slice(1, -1) - else if (/^"/.test(chunk)) { + } else if (/^"/.test(chunk)) { let collect = chunk.trimLeft().slice(1) while (++i < split.length) { const n = split[i] @@ -152,12 +155,14 @@ class YarnLock { if (/[^\\](\\\\)*"$/.test(n)) { collect += n.trimRight().slice(0, -1) break - } else + } else { collect += n + } } out[o++] = collect - } else + } else { out[o++] = chunk.trim() + } } return out } @@ -226,17 +231,19 @@ class YarnLock { // no previous entry for this spec at all, so it's new if (!prev) { // if we saw a match already, then assign this spec to it as well - if (priorEntry) + if (priorEntry) { priorEntry.addSpec(s) - else + } else { newSpecs.push(s) + } continue } const m = match(prev, n) // there was a prior entry, but a different thing. skip this one - if (!m) + if (!m) { continue + } // previous matches, but first time seeing it, so already has this spec. // go ahead and add all the previously unseen specs, though @@ -259,8 +266,9 @@ class YarnLock { // if we never found a matching prior, then this is a whole new thing if (!priorEntry) { const entry = Object.assign(new YarnLockEntry(newSpecs), n) - for (const s of newSpecs) + for (const s of newSpecs) { this.entries.set(s, entry) + } } else { // pick up any new info that we got for this node, so that we can // decorate with integrity/resolved/etc. @@ -270,12 +278,15 @@ class YarnLock { entryDataFromNode (node) { const n = {} - if (node.package.dependencies) + if (node.package.dependencies) { n.dependencies = node.package.dependencies - if (node.package.optionalDependencies) + } + if (node.package.optionalDependencies) { n.optionalDependencies = node.package.optionalDependencies - if (node.version) + } + if (node.version) { n.version = node.version + } if (node.resolved) { n.resolved = consistentResolve( node.resolved, @@ -284,8 +295,9 @@ class YarnLock { true ) } - if (node.integrity) + if (node.integrity) { n.integrity = node.integrity + } return n } diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 42ec2eca3b3106..5d0e31af975d2f 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "2.8.2", + "version": "2.8.3", "description": "Manage node_modules trees", "dependencies": { "@npmcli/installed-package-contents": "^1.0.7", @@ -36,13 +36,9 @@ "walk-up-path": "^1.0.0" }, "devDependencies": { + "@npmcli/lint": "^1.0.2", "benchmark": "^2.1.4", "chalk": "^4.1.0", - "eslint": "^7.9.0", - "eslint-plugin-import": "^2.22.0", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^4.2.1", - "eslint-plugin-standard": "^4.0.1", "minify-registry-metadata": "^2.1.0", "tap": "^15.0.9", "tcompare": "^5.0.6" @@ -50,18 +46,19 @@ "scripts": { "test": "npm run test-only --", "test-only": "tap", - "posttest": "npm run lint", + "posttest": "npm run lint --", "snap": "tap", - "postsnap": "npm run lintfix", + "postsnap": "npm run lintfix --", "test-proxy": "ARBORIST_TEST_PROXY=1 tap --snapshot", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "eslint": "eslint", - "lint": "npm run eslint -- \"lib/**/*.js\" \"test/arborist/*.js\" \"test/*.js\" \"bin/**/*.js\"", + "lint": "npm run npmclilint -- \"lib/**/*.*js\" \"bin/**/*.*js\" \"test/*.*js\" \"test/arborist/*.*js\"", "lintfix": "npm run lint -- --fix", "benchmark": "node scripts/benchmark.js", - "benchclean": "rm -rf 
scripts/benchmark/*/" + "benchclean": "rm -rf scripts/benchmark/*/", + "npmclilint": "npmcli-lint" }, "repository": { "type": "git", diff --git a/deps/npm/node_modules/@npmcli/config/lib/set-envs.js b/deps/npm/node_modules/@npmcli/config/lib/set-envs.js index 370a2f3ffd34bf..8eed0221ba80ec 100644 --- a/deps/npm/node_modules/@npmcli/config/lib/set-envs.js +++ b/deps/npm/node_modules/@npmcli/config/lib/set-envs.js @@ -86,6 +86,8 @@ const setEnvs = (config) => { // also set some other common nice envs that we want to rely on env.HOME = config.home + env.npm_config_global_prefix = config.globalPrefix + env.npm_config_local_prefix = config.localPrefix if (cliConf.editor) env.EDITOR = cliConf.editor diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json index f80669640ebd4e..b31eecbe359797 100644 --- a/deps/npm/node_modules/@npmcli/config/package.json +++ b/deps/npm/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "2.2.0", + "version": "2.3.0", "files": [ "lib" ], diff --git a/deps/npm/node_modules/are-we-there-yet/CHANGES.md b/deps/npm/node_modules/are-we-there-yet/CHANGES.md deleted file mode 100644 index 21f3b1c1284523..00000000000000 --- a/deps/npm/node_modules/are-we-there-yet/CHANGES.md +++ /dev/null @@ -1,37 +0,0 @@ -Hi, figured we could actually use a changelog now: - -## 1.1.5 2018-05-24 - -* [#92](https://github.com/iarna/are-we-there-yet/pull/92) Fix bug where - `finish` would throw errors when including `TrackerStream` objects in - `TrackerGroup` collections. (@brianloveswords) - -## 1.1.4 2017-04-21 - -* Fix typo in package.json - -## 1.1.3 2017-04-21 - -* Improve documentation and limit files included in the distribution. - -## 1.1.2 2016-03-15 - -* Add tracker group cycle detection and tests for it - -## 1.1.1 2016-01-29 - -* Fix a typo in stream completion tracker - -## 1.1.0 2016-01-29 - -* Rewrote completion percent computation to be low impact– no more walking a - tree of completion groups every time we need this info. Previously, with - medium sized tree of completion groups, even a relatively modest number of - calls to the top level `completed()` method would result in absurd numbers - of calls overall as it walked down the tree. We now, instead, keep track as - we bubble up changes, so the computation is limited to when data changes and - to the depth of that one branch, instead of _every_ node. (Plus, we were already - incurring _this_ cost, since we already bubbled out changes.) -* Moved different tracker types out to their own files. -* Made tests test for TOO MANY events too. -* Standarized the source code formatting diff --git a/deps/npm/node_modules/are-we-there-yet/LICENSE b/deps/npm/node_modules/are-we-there-yet/LICENSE deleted file mode 100644 index af4588069db82d..00000000000000 --- a/deps/npm/node_modules/are-we-there-yet/LICENSE +++ /dev/null @@ -1,5 +0,0 @@ -Copyright (c) 2015, Rebecca Turner - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/are-we-there-yet/LICENSE.md b/deps/npm/node_modules/are-we-there-yet/LICENSE.md new file mode 100644 index 00000000000000..845be76f64e789 --- /dev/null +++ b/deps/npm/node_modules/are-we-there-yet/LICENSE.md @@ -0,0 +1,18 @@ +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/are-we-there-yet/index.js b/deps/npm/node_modules/are-we-there-yet/lib/index.js similarity index 100% rename from deps/npm/node_modules/are-we-there-yet/index.js rename to deps/npm/node_modules/are-we-there-yet/lib/index.js diff --git a/deps/npm/node_modules/are-we-there-yet/tracker-base.js b/deps/npm/node_modules/are-we-there-yet/lib/tracker-base.js similarity index 100% rename from deps/npm/node_modules/are-we-there-yet/tracker-base.js rename to deps/npm/node_modules/are-we-there-yet/lib/tracker-base.js diff --git a/deps/npm/node_modules/are-we-there-yet/tracker-group.js b/deps/npm/node_modules/are-we-there-yet/lib/tracker-group.js similarity index 88% rename from deps/npm/node_modules/are-we-there-yet/tracker-group.js rename to deps/npm/node_modules/are-we-there-yet/lib/tracker-group.js index 9759e1226db046..9da13f8a7e116a 100644 --- a/deps/npm/node_modules/are-we-there-yet/tracker-group.js +++ b/deps/npm/node_modules/are-we-there-yet/lib/tracker-group.js @@ -19,7 +19,9 @@ util.inherits(TrackerGroup, TrackerBase) function bubbleChange (trackerGroup) { return function (name, completed, tracker) { trackerGroup.completion[tracker.id] = completed - if (trackerGroup.finished) return + if (trackerGroup.finished) { + return + } trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) } } @@ -53,17 +55,22 @@ TrackerGroup.prototype.addUnit = function (unit, weight) { this.trackers.push(unit) this.completion[unit.id] = unit.completed() unit.on('change', this.bubbleChange) - if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) + if (!this.finished) { + this.emit('change', unit.name, this.completion[unit.id], unit) + } return unit } TrackerGroup.prototype.completed = function () { - if (this.trackers.length === 0) return 0 + if (this.trackers.length === 0) { + return 0 + } var valPerWeight = 1 / this.totalWeight var completed = 0 for (var ii = 0; ii < this.trackers.length; ii++) { var trackerId = this.trackers[ii].id - completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] + completed += + valPerWeight * this.weight[trackerId] * this.completion[trackerId] } return completed } @@ -82,7 
+89,9 @@ TrackerGroup.prototype.newStream = function (name, todo, weight) { TrackerGroup.prototype.finish = function () { this.finished = true - if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) + if (!this.trackers.length) { + this.addUnit(new Tracker(), 1, true) + } for (var ii = 0; ii < this.trackers.length; ii++) { var tracker = this.trackers[ii] tracker.finish() diff --git a/deps/npm/node_modules/are-we-there-yet/tracker-stream.js b/deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js similarity index 100% rename from deps/npm/node_modules/are-we-there-yet/tracker-stream.js rename to deps/npm/node_modules/are-we-there-yet/lib/tracker-stream.js diff --git a/deps/npm/node_modules/are-we-there-yet/tracker.js b/deps/npm/node_modules/are-we-there-yet/lib/tracker.js similarity index 90% rename from deps/npm/node_modules/are-we-there-yet/tracker.js rename to deps/npm/node_modules/are-we-there-yet/lib/tracker.js index 68c2339b45409a..a8f8b3ba013915 100644 --- a/deps/npm/node_modules/are-we-there-yet/tracker.js +++ b/deps/npm/node_modules/are-we-there-yet/lib/tracker.js @@ -20,7 +20,9 @@ Tracker.prototype.addWork = function (work) { Tracker.prototype.completeWork = function (work) { this.workDone += work - if (this.workDone > this.workTodo) this.workDone = this.workTodo + if (this.workDone > this.workTodo) { + this.workDone = this.workTodo + } this.emit('change', this.name, this.completed(), this) } diff --git a/deps/npm/node_modules/are-we-there-yet/package.json b/deps/npm/node_modules/are-we-there-yet/package.json index c5990c9bdc5b22..d3901a86d67c67 100644 --- a/deps/npm/node_modules/are-we-there-yet/package.json +++ b/deps/npm/node_modules/are-we-there-yet/package.json @@ -1,35 +1,53 @@ { "name": "are-we-there-yet", - "version": "1.1.5", + "version": "1.1.6", "description": "Keep track of the overall completion of many disparate processes", - "main": "index.js", + "main": "lib/index.js", "scripts": { - "test": "standard && tap test/*.js" + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint '**/*.js'", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap" }, "repository": { "type": "git", - "url": "https://github.com/iarna/are-we-there-yet.git" + "url": "https://github.com/npm/are-we-there-yet.git" }, - "author": "Rebecca Turner (http://re-becca.org)", + "author": "GitHub Inc.", "license": "ISC", "bugs": { - "url": "https://github.com/iarna/are-we-there-yet/issues" + "url": "https://github.com/npm/are-we-there-yet/issues" }, - "homepage": "https://github.com/iarna/are-we-there-yet", + "homepage": "https://github.com/npm/are-we-there-yet", "devDependencies": { - "standard": "^11.0.1", - "tap": "^12.0.1" + "@npmcli/eslint-config": "^1.0.0", + "@npmcli/template-oss": "^1.0.2", + "eslint": "^7.32.0", + "eslint-plugin-node": "^11.1.0", + "tap": "^15.0.9" }, "dependencies": { "delegates": "^1.0.0", - "readable-stream": "^2.0.6" + "readable-stream": "^3.6.0" }, "files": [ - "index.js", - "tracker-base.js", - "tracker-group.js", - "tracker-stream.js", - "tracker.js", - "CHANGES.md" - ] + "bin", + "lib" + ], + "engines": { + "node": ">=10" + }, + "tap": { + "branches": 68, + "statements": 92, + "functions": 86, + "lines": 92 + }, + "templateVersion": "1.0.2" } diff --git a/deps/npm/node_modules/isarray/Makefile b/deps/npm/node_modules/isarray/Makefile deleted file mode 100644 index 
787d56e1e982e4..00000000000000 --- a/deps/npm/node_modules/isarray/Makefile +++ /dev/null @@ -1,6 +0,0 @@ - -test: - @node_modules/.bin/tape test.js - -.PHONY: test - diff --git a/deps/npm/node_modules/isarray/component.json b/deps/npm/node_modules/isarray/component.json deleted file mode 100644 index 9e31b683889015..00000000000000 --- a/deps/npm/node_modules/isarray/component.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name" : "isarray", - "description" : "Array#isArray for older browsers", - "version" : "0.0.1", - "repository" : "juliangruber/isarray", - "homepage": "https://github.com/juliangruber/isarray", - "main" : "index.js", - "scripts" : [ - "index.js" - ], - "dependencies" : {}, - "keywords": ["browser","isarray","array"], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT" -} diff --git a/deps/npm/node_modules/isarray/index.js b/deps/npm/node_modules/isarray/index.js deleted file mode 100644 index a57f63495943a0..00000000000000 --- a/deps/npm/node_modules/isarray/index.js +++ /dev/null @@ -1,5 +0,0 @@ -var toString = {}.toString; - -module.exports = Array.isArray || function (arr) { - return toString.call(arr) == '[object Array]'; -}; diff --git a/deps/npm/node_modules/isarray/package.json b/deps/npm/node_modules/isarray/package.json deleted file mode 100644 index 1a4317a9c41c73..00000000000000 --- a/deps/npm/node_modules/isarray/package.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "isarray", - "description": "Array#isArray for older browsers", - "version": "1.0.0", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/isarray.git" - }, - "homepage": "https://github.com/juliangruber/isarray", - "main": "index.js", - "dependencies": {}, - "devDependencies": { - "tape": "~2.13.4" - }, - "keywords": [ - "browser", - "isarray", - "array" - ], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT", - "testling": { - "files": "test.js", - "browsers": [ - "ie/8..latest", - "firefox/17..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - }, - "scripts": { - "test": "tape test.js" - } -} diff --git a/deps/npm/node_modules/isarray/test.js b/deps/npm/node_modules/isarray/test.js deleted file mode 100644 index e0c3444d85d5c7..00000000000000 --- a/deps/npm/node_modules/isarray/test.js +++ /dev/null @@ -1,20 +0,0 @@ -var isArray = require('./'); -var test = require('tape'); - -test('is array', function(t){ - t.ok(isArray([])); - t.notOk(isArray({})); - t.notOk(isArray(null)); - t.notOk(isArray(false)); - - var obj = {}; - obj[0] = true; - t.notOk(isArray(obj)); - - var arr = []; - arr.foo = 'bar'; - t.ok(isArray(arr)); - - t.end(); -}); - diff --git a/deps/npm/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/minipass-fetch/lib/request.js index 173f415d18e7b4..278b27874eaaca 100644 --- a/deps/npm/node_modules/minipass-fetch/lib/request.js +++ b/deps/npm/node_modules/minipass-fetch/lib/request.js @@ -82,7 +82,7 @@ class Request extends Body { key, passphrase, pfx, - rejectUnauthorized = true, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', secureOptions, secureProtocol, servername, diff --git a/deps/npm/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/minipass-fetch/package.json index 
64dab7816bd120..1d7d9e47566236 100644 --- a/deps/npm/node_modules/minipass-fetch/package.json +++ b/deps/npm/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "1.3.4", + "version": "1.4.1", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", @@ -22,11 +22,10 @@ "form-data": "^2.5.1", "parted": "^0.1.1", "string-to-arraybuffer": "^1.0.2", - "tap": "^14.6.9", + "tap": "^15.0.9", "whatwg-url": "^7.0.0" }, "dependencies": { - "encoding": "^0.1.12", "minipass": "^3.1.0", "minipass-sized": "^1.0.3", "minizlib": "^2.0.0" diff --git a/deps/npm/node_modules/npmlog/log.js b/deps/npm/node_modules/npmlog/log.js index 069154262e4da9..85ab8a4c7905cf 100644 --- a/deps/npm/node_modules/npmlog/log.js +++ b/deps/npm/node_modules/npmlog/log.js @@ -13,8 +13,9 @@ var stream = process.stderr Object.defineProperty(log, 'stream', { set: function (newStream) { stream = newStream - if (this.gauge) + if (this.gauge) { this.gauge.setWriteTo(stream, stream) + } }, get: function () { return stream @@ -78,20 +79,23 @@ log.setGaugeTemplate = function (template) { } log.enableProgress = function () { - if (this.progressEnabled) + if (this.progressEnabled) { return + } this.progressEnabled = true this.tracker.on('change', this.showProgress) - if (this._paused) + if (this._paused) { return + } this.gauge.enable() } log.disableProgress = function () { - if (!this.progressEnabled) + if (!this.progressEnabled) { return + } this.progressEnabled = false this.tracker.removeListener('change', this.showProgress) this.gauge.disable() @@ -103,19 +107,23 @@ var mixinLog = function (tracker) { // mixin the public methods from log into the tracker // (except: conflicts and one's we handle specially) Object.keys(log).forEach(function (P) { - if (P[0] === '_') + if (P[0] === '_') { return + } if (trackerConstructors.filter(function (C) { return C === P - }).length) + }).length) { return + } - if (tracker[P]) + if (tracker[P]) { return + } - if (typeof log[P] !== 'function') + if (typeof log[P] !== 'function') { return + } var func = log[P] tracker[P] = function () { @@ -143,27 +151,31 @@ trackerConstructors.forEach(function (C) { }) log.clearProgress = function (cb) { - if (!this.progressEnabled) + if (!this.progressEnabled) { return cb && process.nextTick(cb) + } this.gauge.hide(cb) } log.showProgress = function (name, completed) { - if (!this.progressEnabled) + if (!this.progressEnabled) { return + } var values = {} - if (name) + if (name) { values.section = name + } var last = log.record[log.record.length - 1] if (last) { values.subsection = last.prefix var disp = log.disp[last.level] || last.level var logline = this._format(disp, log.style[last.level]) - if (last.prefix) + if (last.prefix) { logline += ' ' + this._format(last.prefix, this.prefixStyle) + } logline += ' ' + last.message.split(/\r?\n/)[0] values.logline = logline @@ -175,13 +187,15 @@ log.showProgress = function (name, completed) { // temporarily stop emitting, but don't drop log.pause = function () { this._paused = true - if (this.progressEnabled) + if (this.progressEnabled) { this.gauge.disable() + } } log.resume = function () { - if (!this._paused) + if (!this._paused) { return + } this._paused = false @@ -190,8 +204,9 @@ log.resume = function () { b.forEach(function (m) { this.emitLog(m) }, this) - if (this.progressEnabled) + if (this.progressEnabled) { this.gauge.enable() + } } log._buffer = [] @@ -220,8 +235,9 @@ log.log = function (lvl, 
prefix, message) { }) } } - if (stack) + if (stack) { a.unshift(stack + '\n') + } message = util.format.apply(util, a) var m = { @@ -234,8 +250,9 @@ log.log = function (lvl, prefix, message) { this.emit('log', m) this.emit('log.' + lvl, m) - if (m.prefix) + if (m.prefix) { this.emit(m.prefix, m) + } this.record.push(m) var mrs = this.maxRecordSize @@ -253,18 +270,22 @@ log.emitLog = function (m) { this._buffer.push(m) return } - if (this.progressEnabled) + if (this.progressEnabled) { this.gauge.pulse(m.prefix) + } var l = this.levels[m.level] - if (l === undefined) + if (l === undefined) { return + } - if (l < this.levels[this.level]) + if (l < this.levels[this.level]) { return + } - if (l > 0 && !isFinite(l)) + if (l > 0 && !isFinite(l)) { return + } // If 'disp' is null or undefined, use the lvl as a default // Allows: '', 0 as valid disp @@ -277,8 +298,9 @@ log.emitLog = function (m) { } this.write(disp, log.style[m.level]) var p = m.prefix || '' - if (p) + if (p) { this.write(' ') + } this.write(p, this.prefixStyle) this.write(' ' + line + '\n') @@ -287,52 +309,63 @@ log.emitLog = function (m) { } log._format = function (msg, style) { - if (!stream) + if (!stream) { return + } var output = '' if (this.useColor()) { style = style || {} var settings = [] - if (style.fg) + if (style.fg) { settings.push(style.fg) + } - if (style.bg) + if (style.bg) { settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + } - if (style.bold) + if (style.bold) { settings.push('bold') + } - if (style.underline) + if (style.underline) { settings.push('underline') + } - if (style.inverse) + if (style.inverse) { settings.push('inverse') + } - if (settings.length) + if (settings.length) { output += consoleControl.color(settings) + } - if (style.beep) + if (style.beep) { output += consoleControl.beep() + } } output += msg - if (this.useColor()) + if (this.useColor()) { output += consoleControl.color('reset') + } return output } log.write = function (msg, style) { - if (!stream) + if (!stream) { return + } stream.write(this._format(msg, style)) } log.addLevel = function (lvl, n, style, disp) { // If 'disp' is null or undefined, use the lvl as a default - if (disp == null) + if (disp == null) { disp = lvl + } this.levels[lvl] = n this.style[lvl] = style @@ -340,8 +373,9 @@ log.addLevel = function (lvl, n, style, disp) { this[lvl] = function () { var a = new Array(arguments.length + 1) a[0] = lvl - for (var i = 0; i < arguments.length; i++) + for (var i = 0; i < arguments.length; i++) { a[i + 1] = arguments[i] + } return this.log.apply(this, a) }.bind(this) diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/LICENSE.md b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/LICENSE.md new file mode 100644 index 00000000000000..845be76f64e789 --- /dev/null +++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/LICENSE.md @@ -0,0 +1,18 @@ +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/index.js b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/index.js new file mode 100644 index 00000000000000..57d8743fdad177 --- /dev/null +++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/index.js @@ -0,0 +1,4 @@ +'use strict' +exports.TrackerGroup = require('./tracker-group.js') +exports.Tracker = require('./tracker.js') +exports.TrackerStream = require('./tracker-stream.js') diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-base.js b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-base.js new file mode 100644 index 00000000000000..6f436875578a7a --- /dev/null +++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-base.js @@ -0,0 +1,11 @@ +'use strict' +var EventEmitter = require('events').EventEmitter +var util = require('util') + +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + this.name = name +} +util.inherits(TrackerBase, EventEmitter) diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-group.js b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-group.js new file mode 100644 index 00000000000000..9da13f8a7e116a --- /dev/null +++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-group.js @@ -0,0 +1,116 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') +var Tracker = require('./tracker.js') +var TrackerStream = require('./tracker-stream.js') + +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} +util.inherits(TrackerGroup, TrackerBase) + +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) { + return + } + trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} + +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} + +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) { + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + + unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup + } + unit.parentGroup = this + } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) { + this.emit('change', unit.name, this.completion[unit.id], unit) + } + return unit +} + +TrackerGroup.prototype.completed = function () { + if (this.trackers.length === 0) { + return 0 + } + var valPerWeight = 1 / 
this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += + valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} + +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} + +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} + +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} + +TrackerGroup.prototype.finish = function () { + this.finished = true + if (!this.trackers.length) { + this.addUnit(new Tracker(), 1, true) + } + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} + +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? buffer.substr(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) + } else { + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + } + }) + return output +} diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-stream.js b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-stream.js new file mode 100644 index 00000000000000..e1cf85055702a7 --- /dev/null +++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker-stream.js @@ -0,0 +1,36 @@ +'use strict' +var util = require('util') +var stream = require('readable-stream') +var delegate = require('delegates') +var Tracker = require('./tracker.js') + +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) +} +util.inherits(TrackerStream, stream.Transform) + +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } +} + +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? data.length : 1) + this.push(data) + cb() +} + +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} + +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') + .method('finish') diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker.js b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker.js new file mode 100644 index 00000000000000..a8f8b3ba013915 --- /dev/null +++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/lib/tracker.js @@ -0,0 +1,32 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') + +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) + +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 
0 : this.workDone / this.workTodo +} + +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) { + this.workDone = this.workTodo + } + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} diff --git a/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/package.json b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/package.json new file mode 100644 index 00000000000000..5714e09c3b3714 --- /dev/null +++ b/deps/npm/node_modules/npmlog/node_modules/are-we-there-yet/package.json @@ -0,0 +1,53 @@ +{ + "name": "are-we-there-yet", + "version": "2.0.0", + "description": "Keep track of the overall completion of many disparate processes", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint '**/*.js'", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/are-we-there-yet.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/are-we-there-yet/issues" + }, + "homepage": "https://github.com/npm/are-we-there-yet", + "devDependencies": { + "@npmcli/eslint-config": "^1.0.0", + "@npmcli/template-oss": "^1.0.2", + "eslint": "^7.32.0", + "eslint-plugin-node": "^11.1.0", + "tap": "^15.0.9" + }, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "files": [ + "bin", + "lib" + ], + "engines": { + "node": ">=10" + }, + "tap": { + "branches": 68, + "statements": 92, + "functions": 86, + "lines": 92 + }, + "templateVersion": "1.0.2" +} diff --git a/deps/npm/node_modules/npmlog/package.json b/deps/npm/node_modules/npmlog/package.json index 5288b9ca062569..960ea9250d949c 100644 --- a/deps/npm/node_modules/npmlog/package.json +++ b/deps/npm/node_modules/npmlog/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", "name": "npmlog", "description": "logger for npm", - "version": "5.0.0", + "version": "5.0.1", "repository": { "type": "git", "url": "https://github.com/npm/npmlog.git" @@ -20,7 +20,7 @@ "postsnap": "npm run lintfix --" }, "dependencies": { - "are-we-there-yet": "^1.1.5", + "are-we-there-yet": "^2.0.0", "console-control-strings": "^1.1.0", "gauge": "^3.0.0", "set-blocking": "^2.0.0" diff --git a/deps/npm/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/process-nextick-args/index.js deleted file mode 100644 index 3eecf11488531c..00000000000000 --- a/deps/npm/node_modules/process-nextick-args/index.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict'; - -if (typeof process === 'undefined' || - !process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = { nextTick: nextTick }; -} else { - module.exports = process -} - -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { - throw new TypeError('"callback" argument must be a function'); - } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); - }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; - } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } -} - diff --git a/deps/npm/node_modules/process-nextick-args/license.md b/deps/npm/node_modules/process-nextick-args/license.md deleted file mode 100644 index c67e3532b54245..00000000000000 --- a/deps/npm/node_modules/process-nextick-args/license.md +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2015 Calvin Metcalf - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.** diff --git a/deps/npm/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/process-nextick-args/package.json deleted file mode 100644 index 6070b723fcd347..00000000000000 --- a/deps/npm/node_modules/process-nextick-args/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "process-nextick-args", - "version": "2.0.1", - "description": "process.nextTick but always with args", - "main": "index.js", - "files": [ - "index.js" - ], - "scripts": { - "test": "node test.js" - }, - "repository": { - "type": "git", - "url": "https://github.com/calvinmetcalf/process-nextick-args.git" - }, - "author": "", - "license": "MIT", - "bugs": { - "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" - }, - "homepage": "https://github.com/calvinmetcalf/process-nextick-args", - "devDependencies": { - "tap": "~0.2.6" - } -} diff --git a/deps/npm/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/process-nextick-args/readme.md deleted file mode 100644 index ecb432c9b21ffd..00000000000000 --- a/deps/npm/node_modules/process-nextick-args/readme.md +++ /dev/null @@ -1,18 +0,0 @@ -process-nextick-args -===== - -[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) - -```bash -npm install --save process-nextick-args -``` - -Always be able to pass arguments to process.nextTick, no matter the platform - -```js -var pna = require('process-nextick-args'); - -pna.nextTick(function (a, b, c) { - console.log(a, b, c); -}, 'step', 3, 'profit'); -``` diff --git a/deps/npm/node_modules/read-package-json/package.json b/deps/npm/node_modules/read-package-json/package.json index 5ca535cfd14adb..fb263fd711ee7f 100644 --- a/deps/npm/node_modules/read-package-json/package.json +++ b/deps/npm/node_modules/read-package-json/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json", - "version": "4.0.1", + "version": "4.1.1", "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", "description": "The thing npm uses to read package.json files with semantics and defaults and validation", "repository": { @@ -12,7 +12,7 @@ "prerelease": "npm t", "postrelease": "npm publish && git push --follow-tags", "release": "standard-version -s", - "test": "tap --nyc-arg=--all --coverage test/*.js --branches 68 --functions 83 --lines 76 --statements 77", + "test": "tap --nyc-arg=--all --coverage test/*.js", "npmclilint": "npmcli-lint", "lint": "npm run npmclilint -- --ignore-pattern test/fixtures \"*.*js\" \"test/**/*.*js\"", "lintfix": "npm run lint -- --fix", @@ -36,5 +36,11 @@ ], "engines": { "node": ">=10" + }, + "tap": { + "branches": 68, + "functions": 83, + "lines": 76, + "statements": 77 } } diff --git a/deps/npm/node_modules/read-package-json/read-json.js b/deps/npm/node_modules/read-package-json/read-json.js index 04d22e3af7e211..468a33e3977742 100644 --- a/deps/npm/node_modules/read-package-json/read-json.js +++ b/deps/npm/node_modules/read-package-json/read-json.js @@ -21,6 +21,7 @@ readJson.extraSet = [ mans, bins, githead, + fillTypes, ] var typoWarned = {} @@ -339,16 +340,17 @@ function readme_ (file, data, rm, cb) { } function mans (file, data, cb) { - var m = data.directories && data.directories.man - if (data.man || !m) { + let cwd = data.directories && data.directories.man + if (data.man || !cwd) { return cb(null, data) } - m = path.resolve(path.dirname(file), m) - glob('**/*.[0-9]', { cwd: m }, function (er, mans) { + const dirname = path.dirname(file) + cwd = path.resolve(path.dirname(file), cwd) + glob('**/*.[0-9]', { cwd }, function (er, mans) { if (er) { return cb(er) } - data.man = mans + data.man = mans.map(man => path.relative(dirname, path.join(cwd, man))) return cb(null, data) }) } @@ -517,6 +519,45 @@ function final (file, data, log, strict, cb) { }) } +function fillTypes (file, data, cb) { + var index = data.main ? 
data.main : 'index.js' + + // TODO exports is much more complicated than this in verbose format + // We need to support for instance + + // "exports": { + // ".": [ + // { + // "default": "./lib/npm.js" + // }, + // "./lib/npm.js" + // ], + // "./package.json": "./package.json" + // }, + // as well as conditional exports + + // if (data.exports && typeof data.exports === 'string') { + // index = data.exports + // } + + // if (data.exports && data.exports['.']) { + // index = data.exports['.'] + // if (typeof index !== 'string') { + // } + // } + + var extless = + path.join(path.dirname(index), path.basename(index, path.extname(index))) + var dts = `./${extless}.d.ts` + var dtsPath = path.join(path.dirname(file), dts) + var hasDTSFields = 'types' in data || 'typings' in data + if (!hasDTSFields && fs.existsSync(dtsPath)) { + data.types = dts + } + + cb(null, data) +} + function makePackageId (data) { var name = cleanString(data.name) var ver = cleanString(data.version) diff --git a/deps/npm/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md deleted file mode 100644 index 83275f192e4077..00000000000000 --- a/deps/npm/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md +++ /dev/null @@ -1,60 +0,0 @@ -# streams WG Meeting 2015-01-30 - -## Links - -* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg -* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 -* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ - -## Agenda - -Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. - -* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) -* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) -* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) -* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) - -## Minutes - -### adopt a charter - -* group: +1's all around - -### What versioning scheme should be adopted? -* group: +1’s 3.0.0 -* domenic+group: pulling in patches from other sources where appropriate -* mikeal: version independently, suggesting versions for io.js -* mikeal+domenic: work with TC to notify in advance of changes -simpler stream creation - -### streamline creation of streams -* sam: streamline creation of streams -* domenic: nice simple solution posted - but, we lose the opportunity to change the model - may not be backwards incompatible (double check keys) - - **action item:** domenic will check - -### remove implicit flowing of streams on(‘data’) -* add isFlowing / isPaused -* mikeal: worrying that we’re documenting polyfill methods – confuses users -* domenic: more reflective API is probably good, with warning labels for users -* new section for mad scientists (reflective stream access) -* calvin: name the “third state” -* mikeal: maybe borrow the name from whatwg? -* domenic: we’re missing the “third state” -* consensus: kind of difficult to name the third state -* mikeal: figure out differences in states / compat -* mathias: always flow on data – eliminates third state - * explore what it breaks - -**action items:** -* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) -* ask rod/build for infrastructure -* **chris**: explore the “flow on data” approach -* add isPaused/isFlowing -* add new docs section -* move isPaused to that section - - diff --git a/deps/npm/node_modules/readable-stream/duplex-browser.js b/deps/npm/node_modules/readable-stream/duplex-browser.js deleted file mode 100644 index f8b2db83dbe733..00000000000000 --- a/deps/npm/node_modules/readable-stream/duplex-browser.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./lib/_stream_duplex.js'); diff --git a/deps/npm/node_modules/readable-stream/duplex.js b/deps/npm/node_modules/readable-stream/duplex.js deleted file mode 100644 index 46924cbfdf5387..00000000000000 --- a/deps/npm/node_modules/readable-stream/duplex.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./readable').Duplex diff --git a/deps/npm/node_modules/readable-stream/errors-browser.js b/deps/npm/node_modules/readable-stream/errors-browser.js new file mode 100644 index 00000000000000..fb8e73e1893b10 --- /dev/null +++ b/deps/npm/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/deps/npm/node_modules/readable-stream/errors.js b/deps/npm/node_modules/readable-stream/errors.js new file mode 100644 index 00000000000000..8471526d6e7f75 --- /dev/null +++ b/deps/npm/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of 
${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/deps/npm/node_modules/readable-stream/experimentalWarning.js b/deps/npm/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 00000000000000..78e841495bf24d --- /dev/null +++ b/deps/npm/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. 
This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js index 57003c32d256c0..67525192250f6d 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js @@ -18,43 +18,37 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // a duplex stream is just a stream that is both readable and writable. // Since JS doesn't have multiple prototypal inheritance, this class // prototypally inherits from Readable, and then parasitically from // Writable. - 'use strict'; - /**/ -var pna = require('process-nextick-args'); -/**/ - -/**/ var objectKeys = Object.keys || function (obj) { var keys = []; + for (var key in obj) { keys.push(key); - }return keys; + } + + return keys; }; /**/ -module.exports = Duplex; -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ +module.exports = Duplex; var Readable = require('./_stream_readable'); + var Writable = require('./_stream_writable'); -util.inherits(Duplex, Readable); +require('inherits')(Duplex, Readable); { - // avoid scope creep, the keys array can then be collected + // Allow the keys array to be GC'ed. var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { var method = keys[v]; if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; @@ -63,18 +57,19 @@ util.inherits(Duplex, Readable); function Duplex(options) { if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - - if (options && options.writable === false) this.writable = false; - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - this.once('end', onend); + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } } Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { @@ -82,20 +77,35 @@ Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function () { + get: function get() { return this._writableState.highWaterMark; } }); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); // the no-half-open enforcer -// the 
no-half-open enforcer function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; // no more data can be written. // But allow more writes to happen in this tick. - pna.nextTick(onEndNT, this); + + process.nextTick(onEndNT, this); } function onEndNT(self) { @@ -103,29 +113,27 @@ function onEndNT(self) { } Object.defineProperty(Duplex.prototype, 'destroyed', { - get: function () { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { if (this._readableState === undefined || this._writableState === undefined) { return false; } + return this._readableState.destroyed && this._writableState.destroyed; }, - set: function (value) { + set: function set(value) { // we ignore the value if the stream // has not been initialized yet if (this._readableState === undefined || this._writableState === undefined) { return; - } - - // backward compatibility, the user is explicitly + } // backward compatibility, the user is explicitly // managing destroyed + + this._readableState.destroyed = value; this._writableState.destroyed = value; } -}); - -Duplex.prototype._destroy = function (err, cb) { - this.push(null); - this.end(); - - pna.nextTick(cb, err); -}; \ No newline at end of file +}); \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js index 612edb4d8b443f..32e7414c5a8271 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js @@ -18,27 +18,19 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // a passthrough stream. // basically just the most minimal sort of Transform stream. // Every written chunk gets output as-is. - 'use strict'; module.exports = PassThrough; var Transform = require('./_stream_transform'); -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ - -util.inherits(PassThrough, Transform); +require('inherits')(PassThrough, Transform); function PassThrough(options) { if (!(this instanceof PassThrough)) return new PassThrough(options); - Transform.call(this, options); } diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/readable-stream/lib/_stream_readable.js index 0f807646b0f67d..192d451488f208 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_readable.js @@ -18,118 +18,110 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
- 'use strict'; -/**/ - -var pna = require('process-nextick-args'); -/**/ - module.exports = Readable; - /**/ -var isArray = require('isarray'); -/**/ -/**/ var Duplex; /**/ Readable.ReadableState = ReadableState; - /**/ + var EE = require('events').EventEmitter; -var EElistenerCount = function (emitter, type) { +var EElistenerCount = function EElistenerCount(emitter, type) { return emitter.listeners(type).length; }; /**/ /**/ + + var Stream = require('./internal/streams/stream'); /**/ -/**/ -var Buffer = require('safe-buffer').Buffer; +var Buffer = require('buffer').Buffer; + var OurUint8Array = global.Uint8Array || function () {}; + function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); } + function _isUint8Array(obj) { return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } - -/**/ - /**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ -/**/ + var debugUtil = require('util'); -var debug = void 0; + +var debug; + if (debugUtil && debugUtil.debuglog) { debug = debugUtil.debuglog('stream'); } else { - debug = function () {}; + debug = function debug() {}; } /**/ -var BufferList = require('./internal/streams/BufferList'); + +var BufferList = require('./internal/streams/buffer_list'); + var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + + var StringDecoder; +var createReadableStreamAsyncIterator; +var from; -util.inherits(Readable, Stream); +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; function prependListener(emitter, event, fn) { // Sadly this is not cacheable as some libraries bundle their own // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); - - // This is a hack to make sure that our error handler is attached before any + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any // userland ones. NEVER DO THIS. This is here only because this code needs // to continue to work with older versions of Node.js that do not include // the prependListener() method. The goal is to eventually remove this hack. 
- if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; + + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; } -function ReadableState(options, stream) { +function ReadableState(options, stream, isDuplex) { Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // Duplex streams are both readable and writable, but share + options = options || {}; // Duplex streams are both readable and writable, but share // the same options object. // However, some cases require setting options to different // values for the readable and the writable sides of the duplex stream. // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; - // object stream flag. Used to make read(n) ignore n and to + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var readableHwm = options.readableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; - - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); - // A linked list is used to store data chunks instead of an array because the + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the // linked list can remove elements from the beginning faster than // array.shift() + this.buffer = new BufferList(); this.length = 0; this.pipes = null; @@ -137,37 +129,36 @@ function ReadableState(options, stream) { this.flowing = null; this.ended = false; this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the event 'readable'/'data' is emitted + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted // immediately, or on a later tick. We set this to true at first, because // any actions that shouldn't happen until "later" should generally also // not happen before the first read call. - this.sync = true; - // whenever we return null, then we set a flag to say + this.sync = true; // whenever we return null, then we set a flag to say // that we're awaiting a 'readable' event emission. + this.needReadable = false; this.emittedReadable = false; this.readableListening = false; this.resumeScheduled = false; + this.paused = true; // Should close be emitted on destroy. Defaults to true. 
- // has it been destroyed - this.destroyed = false; + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') - // Crypto is kind of old and crusty. Historically, its default string + this.autoDestroy = !!options.autoDestroy; // has it been destroyed + + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s - // if true, a maybeReadMore has been scheduled - this.readingMore = false; + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + this.readingMore = false; this.decoder = null; this.encoding = null; + if (options.encoding) { if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; this.decoder = new StringDecoder(options.encoding); @@ -177,17 +168,16 @@ function ReadableState(options, stream) { function Readable(options) { Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); // legacy - // legacy this.readable = true; if (options) { if (typeof options.read === 'function') this._read = options.read; - if (typeof options.destroy === 'function') this._destroy = options.destroy; } @@ -195,36 +185,40 @@ function Readable(options) { } Object.defineProperty(Readable.prototype, 'destroyed', { - get: function () { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { if (this._readableState === undefined) { return false; } + return this._readableState.destroyed; }, - set: function (value) { + set: function set(value) { // we ignore the value if the stream // has not been initialized yet if (!this._readableState) { return; - } - - // backward compatibility, the user is explicitly + } // backward compatibility, the user is explicitly // managing destroyed + + this._readableState.destroyed = value; } }); - Readable.prototype.destroy = destroyImpl.destroy; Readable.prototype._undestroy = destroyImpl.undestroy; + Readable.prototype._destroy = function (err, cb) { - this.push(null); cb(err); -}; - -// Manually shove something into the read() buffer. +}; // Manually shove something into the read() buffer. // This returns true if the highWaterMark has not been hit yet, // similar to how Writable.write() returns true if you should // write() some more. 
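// A minimal sketch (the CounterSource name and the 100-chunk limit are
// hypothetical, not part of the vendored code): a _read() implementation that
// honours the return value of push() described above, stopping as soon as
// push() reports that the highWaterMark has been reached.
const { Readable } = require('readable-stream');

class CounterSource extends Readable {
  constructor(options) {
    super(options);
    this._n = 0;
  }

  _read() {
    // Keep pushing until push() returns false (buffer full) or the source is
    // exhausted; push(null) signals end-of-stream.
    while (this._n < 100) {
      if (!this.push(String(this._n++))) return;
    }
    this.push(null);
  }
}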
+ + Readable.prototype.push = function (chunk, encoding) { var state = this._readableState; var skipChunkCheck; @@ -232,10 +226,12 @@ Readable.prototype.push = function (chunk, encoding) { if (!state.objectMode) { if (typeof chunk === 'string') { encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { chunk = Buffer.from(chunk, encoding); encoding = ''; } + skipChunkCheck = true; } } else { @@ -243,34 +239,40 @@ Readable.prototype.push = function (chunk, encoding) { } return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; +}; // Unshift should *always* be something directly out of read() + -// Unshift should *always* be something directly out of read() Readable.prototype.unshift = function (chunk) { return readableAddChunk(this, chunk, null, true, false); }; function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); var state = stream._readableState; + if (chunk === null) { state.reading = false; onEofChunk(stream, state); } else { var er; if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { - stream.emit('error', er); + errorOrDestroy(stream, er); } else if (state.objectMode || chunk && chunk.length > 0) { if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { chunk = _uint8ArrayToBuffer(chunk); } if (addToFront) { - if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); } else if (state.ended) { - stream.emit('error', new Error('stream.push() after EOF')); + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; } else { state.reading = false; + if (state.decoder && !encoding) { chunk = state.decoder.write(chunk); if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); @@ -280,61 +282,73 @@ function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { } } else if (!addToFront) { state.reading = false; + maybeReadMore(stream, state); } - } + } // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. - return needMoreData(state); + + return !state.ended && (state.length < state.highWaterMark || state.length === 0); } function addChunk(stream, state, chunk, addToFront) { if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; stream.emit('data', chunk); - stream.read(0); } else { // update the buffer info. state.length += state.objectMode ? 1 : chunk.length; if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); } + maybeReadMore(stream, state); } function chunkInvalid(state, chunk) { var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); } - return er; -} -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. 
Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); + return er; } Readable.prototype.isPaused = function () { return this._readableState.flowing === false; -}; +}; // backwards compatibility. + -// backwards compatibility. Readable.prototype.setEncoding = function (enc) { if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); + + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; return this; -}; +}; // Don't raise the hwm > 1GB + + +var MAX_HWM = 0x40000000; -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; function computeNewHighWaterMark(n) { if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. n = MAX_HWM; } else { // Get the next highest power of 2 to prevent increasing hwm excessively in @@ -347,56 +361,55 @@ function computeNewHighWaterMark(n) { n |= n >>> 16; n++; } - return n; -} -// This function is designed to be inlinable, so please take care when making + return n; +} // This function is designed to be inlinable, so please take care when making // changes to the function body. + + function howMuchToRead(n, state) { if (n <= 0 || state.length === 0 && state.ended) return 0; if (state.objectMode) return 1; + if (n !== n) { // Only flow one buffer at a time if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. + } // If we're asking for more than the current hwm, then raise the hwm. + + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough + if (n <= state.length) return n; // Don't have enough + if (!state.ended) { state.needReadable = true; return 0; } + return state.length; -} +} // you can override either this method, or the async _read(n) below. + -// you can override either this method, or the async _read(n) below. Readable.prototype.read = function (n) { debug('read', n); n = parseInt(n, 10); var state = this._readableState; var nOrig = n; - - if (n !== 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we // already have a bunch of data in the buffer, then just trigger // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? 
state.length >= state.highWaterMark : state.length > 0) || state.ended)) { debug('read: emitReadable', state.length, state.ended); if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); return null; } - n = howMuchToRead(n, state); + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. - // if we've ended, and we're now clear, then finish it up. if (n === 0 && state.ended) { if (state.length === 0) endReadable(this); return null; - } - - // All the actual chunk generation logic needs to be + } // All the actual chunk generation logic needs to be // *below* the call to _read. The reason is that in certain // synthetic stream cases, such as passthrough streams, _read // may be a completely synchronous operation which may change @@ -417,33 +430,34 @@ Readable.prototype.read = function (n) { // 'readable' etc. // // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. + + var doRead = state.needReadable; - debug('need readable', doRead); + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some - // if we currently have less than the highWaterMark, then also read some if (state.length === 0 || state.length - n < state.highWaterMark) { doRead = true; debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already + } // however, if we've ended, then there's no point, and if we're already // reading, then it's unnecessary. + + if (state.ended || state.reading) { doRead = false; debug('reading or ended', doRead); } else if (doRead) { debug('do read'); state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method + state.sync = true; // if the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true; // call internal read method + this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, + + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); } @@ -451,91 +465,144 @@ Readable.prototype.read = function (n) { if (n > 0) ret = fromList(n, state);else ret = null; if (ret === null) { - state.needReadable = true; + state.needReadable = state.length <= state.highWaterMark; n = 0; } else { state.length -= n; + state.awaitDrain = 0; } if (state.length === 0) { // If we have nothing in the buffer, then we want to know // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. - // If we tried to read() past the EOF, then emit end on the next tick. if (nOrig !== n && state.ended) endReadable(this); } if (ret !== null) this.emit('data', ret); - return ret; }; function onEofChunk(stream, state) { + debug('onEofChunk'); if (state.ended) return; + if (state.decoder) { var chunk = state.decoder.end(); + if (chunk && chunk.length) { state.buffer.push(chunk); state.length += state.objectMode ? 1 : chunk.length; } } + state.ended = true; - // emit 'readable' now to make sure it gets picked up. 
- emitReadable(stream); -} + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; -// Don't emit readable right away in sync mode, because this can trigger + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} // Don't emit readable right away in sync mode, because this can trigger // another read() call => stack overflow. This way, it might trigger // a nextTick recursion warning, but that's not so bad. + + function emitReadable(stream) { var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); state.needReadable = false; + if (!state.emittedReadable) { debug('emitReadable', state.flowing); state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + process.nextTick(emitReadable_, stream); } } function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); -// at this point, the user has presumably seen the 'readable' event, + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + + + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} // at this point, the user has presumably seen the 'readable' event, // and called read() to consume some data. that may have triggered // in turn another _read(n) call, in which case reading = true if // it's in progress. // However, if we're not ended, or reading, and the length < hwm, // then go ahead and try to read some more preemptively. + + function maybeReadMore(stream, state) { if (!state.readingMore) { state.readingMore = true; - pna.nextTick(maybeReadMore_, stream, state); + process.nextTick(maybeReadMore_, stream, state); } } function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). 
+ // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; debug('maybeReadMore read 0'); stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; + if (len === state.length) // didn't get any data, stop spinning. + break; } - state.readingMore = false; -} -// abstract method. to be overridden in specific implementation classes. + state.readingMore = false; +} // abstract method. to be overridden in specific implementation classes. // call cb(er, data) where data is <= n in length. // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. + + Readable.prototype._read = function (n) { - this.emit('error', new Error('_read() is not implemented')); + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); }; Readable.prototype.pipe = function (dest, pipeOpts) { @@ -546,24 +613,26 @@ Readable.prototype.pipe = function (dest, pipeOpts) { case 0: state.pipes = dest; break; + case 1: state.pipes = [state.pipes, dest]; break; + default: state.pipes.push(dest); break; } + state.pipesCount += 1; debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); - + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { debug('onunpipe'); + if (readable === src) { if (unpipeInfo && unpipeInfo.hasUnpiped === false) { unpipeInfo.hasUnpiped = true; @@ -575,19 +644,19 @@ Readable.prototype.pipe = function (dest, pipeOpts) { function onend() { debug('onend'); dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter + } // when the dest drains, it reduces the awaitDrain counter // on the source. This would be more elegant with a .once() // handler in flow(), but adding and removing repeatedly is // too slow. + + var ondrain = pipeOnDrain(src); dest.on('drain', ondrain); - var cleanedUp = false; + function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken + debug('cleanup'); // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); dest.removeListener('finish', onfinish); dest.removeListener('drain', ondrain); @@ -596,75 +665,71 @@ Readable.prototype.pipe = function (dest, pipeOpts) { src.removeListener('end', onend); src.removeListener('end', unpipe); src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this + cleanedUp = true; // if the reader is waiting for a drain event from this // specific writer, then it would cause it to never start // flowing again. // So, if this is awaiting a drain, then we just call it now. // If we don't know, then assume that we are waiting for one. 
+ if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); } - // If the user pushes more data while we're writing to dest then we'll end up - // in ondata again. However, we only want to increase awaitDrain once because - // dest will only emit one 'drain' event for the multiple writes. - // => Introduce a guard on increasing awaitDrain. - var increasedAwaitDrain = false; src.on('data', ondata); + function ondata(chunk) { debug('ondata'); - increasedAwaitDrain = false; var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { + debug('dest.write', ret); + + if (ret === false) { // If the user unpiped during `dest.write()`, it is possible // to get stuck in a permanently paused state if that write // also returned false. // => Check whether `dest` is still a piping destination. if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - increasedAwaitDrain = true; + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; } + src.pause(); } - } - - // if the dest has an error, then stop piping into it. + } // if the dest has an error, then stop piping into it. // however, don't suppress the throwing behavior for this. + + function onerror(er) { debug('onerror', er); unpipe(); dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } // Make sure our error handler is attached before userland ones. - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); - // Both close and finish should trigger unpipe, but only once. + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. + function onclose() { dest.removeListener('finish', onfinish); unpipe(); } + dest.once('close', onclose); + function onfinish() { debug('onfinish'); dest.removeListener('close', onclose); unpipe(); } + dest.once('finish', onfinish); function unpipe() { debug('unpipe'); src.unpipe(dest); - } + } // tell the dest that it's being piped to - // tell the dest that it's being piped to - dest.emit('pipe', src); - // start the flow if it hasn't been started already. + dest.emit('pipe', src); // start the flow if it hasn't been started already. + if (!state.flowing) { debug('pipe resume'); src.resume(); @@ -674,10 +739,11 @@ Readable.prototype.pipe = function (dest, pipeOpts) { }; function pipeOnDrain(src) { - return function () { + return function pipeOnDrainFunctionResult() { var state = src._readableState; debug('pipeOnDrain', state.awaitDrain); if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { state.flowing = true; flow(src); @@ -687,27 +753,24 @@ function pipeOnDrain(src) { Readable.prototype.unpipe = function (dest) { var state = this._readableState; - var unpipeInfo = { hasUnpiped: false }; + var unpipeInfo = { + hasUnpiped: false + }; // if we're not piping anywhere, then do nothing. - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; + if (state.pipesCount === 0) return this; // just one destination. most common case. - // just one destination. most common case. if (state.pipesCount === 1) { // passed in one, but it's not the right one. 
if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; // got a match. - if (!dest) dest = state.pipes; - - // got a match. state.pipes = null; state.pipesCount = 0; state.flowing = false; if (dest) dest.emit('unpipe', this, unpipeInfo); return this; - } + } // slow case. multiple pipe destinations. - // slow case. multiple pipe destinations. if (!dest) { // remove all. @@ -718,80 +781,139 @@ Readable.prototype.unpipe = function (dest) { state.flowing = false; for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, unpipeInfo); - }return this; - } + dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + } + + return this; + } // try to find the right one. + - // try to find the right one. var index = indexOf(state.pipes, dest); if (index === -1) return this; - state.pipes.splice(index, 1); state.pipesCount -= 1; if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit('unpipe', this, unpipeInfo); - return this; -}; - -// set up data events if they are asked for +}; // set up data events if they are asked for // Ensure readable listeners eventually get something + + Readable.prototype.on = function (ev, fn) { var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; if (ev === 'data') { - // Start flowing on next tick if stream isn't explicitly paused - if (this._readableState.flowing !== false) this.resume(); + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + + if (state.flowing !== false) this.resume(); } else if (ev === 'readable') { - var state = this._readableState; if (!state.endEmitted && !state.readableListening) { state.readableListening = state.needReadable = true; + state.flowing = false; state.emittedReadable = false; - if (!state.reading) { - pna.nextTick(nReadingNextTick, this); - } else if (state.length) { + debug('on readable', state.length, state.reading); + + if (state.length) { emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); } } } return res; }; + Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. 
+ process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} + function nReadingNextTick(self) { debug('readable nexttick read 0'); self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API +} // pause() and resume() are remnants of the legacy readable stream API // If the user uses them, then switch into old mode. + + Readable.prototype.resume = function () { var state = this._readableState; + if (!state.flowing) { - debug('resume'); - state.flowing = true; + debug('resume'); // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + + state.flowing = !state.readableListening; resume(this, state); } + + state.paused = false; return this; }; function resume(stream, state) { if (!state.resumeScheduled) { state.resumeScheduled = true; - pna.nextTick(resume_, stream, state); + process.nextTick(resume_, stream, state); } } function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { - debug('resume read 0'); stream.read(0); } state.resumeScheduled = false; - state.awaitDrain = 0; stream.emit('resume'); flow(stream); if (state.flowing && !state.reading) stream.read(0); @@ -799,31 +921,37 @@ function resume_(stream, state) { Readable.prototype.pause = function () { debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { + + if (this._readableState.flowing !== false) { debug('pause'); this._readableState.flowing = false; this.emit('pause'); } + + this._readableState.paused = true; return this; }; function flow(stream) { var state = stream._readableState; debug('flow', state.flowing); - while (state.flowing && stream.read() !== null) {} -} -// wrap an old-style stream as the async data source. + while (state.flowing && stream.read() !== null) { + ; + } +} // wrap an old-style stream as the async data source. // This is *not* part of the readable stream interface. // It is an ugly unfortunate mess of history. + + Readable.prototype.wrap = function (stream) { var _this = this; var state = this._readableState; var paused = false; - stream.on('end', function () { debug('wrapped end'); + if (state.decoder && !state.ended) { var chunk = state.decoder.end(); if (chunk && chunk.length) _this.push(chunk); @@ -831,42 +959,41 @@ Readable.prototype.wrap = function (stream) { _this.push(null); }); - stream.on('data', function (chunk) { debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode - // don't skip over falsy values in objectMode if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; var ret = _this.push(chunk); + if (!ret) { paused = true; stream.pause(); } - }); - - // proxy all the other methods. + }); // proxy all the other methods. // important when wrapping filters and duplexes. 
+ for (var i in stream) { if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { return stream[method].apply(stream, arguments); }; }(i); } - } + } // proxy certain important events. + - // proxy certain important events. for (var n = 0; n < kProxyEvents.length; n++) { stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } - - // when we try to consume some more bytes, simply unpause the + } // when we try to consume some more bytes, simply unpause the // underlying stream. + + this._read = function (n) { debug('wrapped _read', n); + if (paused) { paused = false; stream.resume(); @@ -876,144 +1003,122 @@ Readable.prototype.wrap = function (stream) { return this; }; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + + return createReadableStreamAsyncIterator(this); + }; +} + Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function () { + get: function get() { return this._readableState.highWaterMark; } }); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); // exposed for testing purposes only. -// exposed for testing purposes only. Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); // Pluck off n bytes from an array of buffers. // Length is the combined lengths of all the buffers in the list. // This function is designed to be inlinable, so please take care when making // changes to the function body. 
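// A minimal sketch (the letters stream and its chunk values are hypothetical):
// consuming a Readable with for await...of, relying on the
// Symbol.asyncIterator support wired up above.
const { Readable } = require('readable-stream');

const letters = new Readable({
  objectMode: true,
  read() {
    this.push('a');
    this.push('b');
    this.push(null); // end of stream
  }
});

(async () => {
  for await (const chunk of letters) {
    console.log(chunk); // 'a', then 'b'
  }
})().catch(console.error);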
+ function fromList(n, state) { // nothing buffered if (state.length === 0) return null; - var ret; if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); state.buffer.clear(); } else { // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); + ret = state.buffer.consume(n, state.decoder); } - - return ret; -} - -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); - } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); - } - return ret; -} - -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - -// Copies a specified amount of bytes from the list of buffered data chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - var ret = Buffer.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); - } - break; - } - ++c; - } - list.length -= c; return ret; } function endReadable(stream) { var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + debug('endReadable', state.endEmitted); if (!state.endEmitted) { state.ended = true; - pna.nextTick(endReadableNT, state, stream); + process.nextTick(endReadableNT, state, stream); } } function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { state.endEmitted = true; stream.readable = false; stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } } } +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + + return from(Readable, iterable, opts); + }; +} + function indexOf(xs, x) { for (var i = 0, l = xs.length; i < l; i++) { if (xs[i] === x) return i; } + return -1; } \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/readable-stream/lib/_stream_transform.js index fcfc105af8e9a1..41a738c4e93599 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_transform.js @@ -18,7 +18,6 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // a transform stream is a readable/writable stream where you do // something with the data. Sometimes it's called a "filter", // but that's not a great name for it, since that implies a thing where @@ -60,40 +59,37 @@ // However, even in such a pathological case, only a single written chunk // would be consumed, and then the rest would wait (un-transformed) until // the results of the previous transformed chunk were consumed. - 'use strict'; module.exports = Transform; -var Duplex = require('./_stream_duplex'); +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; -/**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ +var Duplex = require('./_stream_duplex'); -util.inherits(Transform, Duplex); +require('inherits')(Transform, Duplex); function afterTransform(er, data) { var ts = this._transformState; ts.transforming = false; - var cb = ts.writecb; - if (!cb) { - return this.emit('error', new Error('write callback called multiple times')); + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); } ts.writechunk = null; ts.writecb = null; - if (data != null) // single equals check for both `null` and `undefined` this.push(data); - cb(er); - var rs = this._readableState; rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { this._read(rs.highWaterMark); } @@ -101,9 +97,7 @@ function afterTransform(er, data) { function Transform(options) { if (!(this instanceof Transform)) return new Transform(options); - Duplex.call(this, options); - this._transformState = { afterTransform: afterTransform.bind(this), needTransform: false, @@ -111,30 +105,27 @@ function Transform(options) { writecb: null, writechunk: null, writeencoding: null - }; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; + }; // start out asking for a readable event once data is transformed. 
- // we have implemented the _read method, and done the other things + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things // that Readable wants before the first _read call, so unset the // sync guard flag. + this._readableState.sync = false; if (options) { if (typeof options.transform === 'function') this._transform = options.transform; - if (typeof options.flush === 'function') this._flush = options.flush; - } + } // When the writable side finishes, then flush out anything remaining. + - // When the writable side finishes, then flush out anything remaining. this.on('prefinish', prefinish); } function prefinish() { var _this = this; - if (typeof this._flush === 'function') { + if (typeof this._flush === 'function' && !this._readableState.destroyed) { this._flush(function (er, data) { done(_this, er, data); }); @@ -146,9 +137,7 @@ function prefinish() { Transform.prototype.push = function (chunk, encoding) { this._transformState.needTransform = false; return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! +}; // This is the part where you do stuff! // override this function in implementation classes. // 'chunk' is an input chunk. // @@ -158,8 +147,10 @@ Transform.prototype.push = function (chunk, encoding) { // Call `cb(err)` when you are done with this chunk. If you pass // an error, then that'll put the hurt on the whole operation. If you // never call cb(), then you'll never get another chunk. + + Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('_transform() is not implemented'); + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); }; Transform.prototype._write = function (chunk, encoding, cb) { @@ -167,20 +158,22 @@ Transform.prototype._write = function (chunk, encoding, cb) { ts.writecb = cb; ts.writechunk = chunk; ts.writeencoding = encoding; + if (!ts.transforming) { var rs = this._readableState; if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); } -}; - -// Doesn't matter what the args are here. +}; // Doesn't matter what the args are here. // _transform does all the work. // That we got here means that the readable side wants more data. 
+ + Transform.prototype._read = function (n) { var ts = this._transformState; - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + if (ts.writechunk !== null && !ts.transforming) { ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); } else { // mark that we need a transform, so that any data that comes in @@ -190,25 +183,19 @@ Transform.prototype._read = function (n) { }; Transform.prototype._destroy = function (err, cb) { - var _this2 = this; - Duplex.prototype._destroy.call(this, err, function (err2) { cb(err2); - _this2.emit('close'); }); }; function done(stream, er, data) { if (er) return stream.emit('error', er); - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); - + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases // if there's nothing in the write buffer, then that means // that nothing more will ever be provided - if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); - - if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); return stream.push(null); } \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/readable-stream/lib/_stream_writable.js index b0b02200cd7233..a2634d7c24fd5e 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_writable.js @@ -18,35 +18,29 @@ // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. - // A bit simpler than readable streams. // Implement an async ._write(chunk, encoding, cb), and it'll handle all // the drain event emission and buffering. - 'use strict'; -/**/ - -var pna = require('process-nextick-args'); -/**/ - module.exports = Writable; - /* */ + function WriteReq(chunk, encoding, cb) { this.chunk = chunk; this.encoding = encoding; this.callback = cb; this.next = null; -} - -// It seems a linked list but it is not +} // It seems a linked list but it is not // there will be only 2 of these for each stream + + function CorkedRequest(state) { var _this = this; this.next = null; this.entry = null; + this.finish = function () { onCorkedFinish(_this, state); }; @@ -54,266 +48,247 @@ function CorkedRequest(state) { /* */ /**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : pna.nextTick; -/**/ -/**/ + var Duplex; /**/ Writable.WritableState = WritableState; - /**/ -var util = Object.create(require('core-util-is')); -util.inherits = require('inherits'); -/**/ -/**/ var internalUtil = { deprecate: require('util-deprecate') }; /**/ /**/ + var Stream = require('./internal/streams/stream'); /**/ -/**/ -var Buffer = require('safe-buffer').Buffer; +var Buffer = require('buffer').Buffer; + var OurUint8Array = global.Uint8Array || function () {}; + function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); } + function _isUint8Array(obj) { return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } -/**/ - var destroyImpl = require('./internal/streams/destroy'); -util.inherits(Writable, Stream); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; -function nop() {} +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); +var errorOrDestroy = destroyImpl.errorOrDestroy; - options = options || {}; +require('inherits')(Writable, Stream); - // Duplex streams are both readable and writable, but share +function nop() {} + +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share // the same options object. // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. - // object stream flag to indicate whether or not this stream + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - // the point at which write() starts returning false + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false // Note: 0 is a valid value, means that we always return false if // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var writableHwm = options.writableHighWaterMark; - var defaultHwm = this.objectMode ? 
16 : 16 * 1024; - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + + this.finalCalled = false; // drain event flag. - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); + this.needDrain = false; // at the start of calling end() - // if _final has been called - this.finalCalled = false; + this.ending = false; // when end() has been called, and returned - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; + this.ended = false; // when 'finish' is emitted - // has it been destroyed - this.destroyed = false; + this.finished = false; // has it been destroyed - // should we decode strings into buffers before passing to _write? + this.destroyed = false; // should we decode strings into buffers before passing to _write? // this is here so that some node-core streams can optimize string // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - // Crypto is kind of old and crusty. Historically, its default string + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - // not an actual buffer we keep track of, but a measurement + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement // of how much we're waiting to get pushed to some underlying // socket or file. - this.length = 0; - // a flag to see when we're in the middle of a write. - this.writing = false; + this.length = 0; // a flag to see when we're in the middle of a write. - // when true all writes will be buffered until .uncork() call - this.corked = 0; + this.writing = false; // when true all writes will be buffered until .uncork() call - // a flag to be able to tell if the onwrite cb is called immediately, + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, // or on a later tick. We set this to true at first, because any // actions that shouldn't happen until "later" should generally also // not happen before the first write call. - this.sync = true; - // a flag to know if we're processing previously buffered items, which + this.sync = true; // a flag to know if we're processing previously buffered items, which // may call the _write() callback in the same tick, so that we don't // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - // the callback that's passed to _write(chunk,cb) + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { onwrite(stream, er); - }; + }; // the callback that the user supplies to write(chunk,encoding,cb) - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - // the amount that is being written when _write is called. - this.writelen = 0; + this.writecb = null; // the amount that is being written when _write is called. 
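// Illustrative aside, not part of the patch: the getHighWaterMark() call above
// lets a Duplex configure its two sides separately. A minimal sketch, assuming
// readable-stream is required directly:
var Duplex = require('readable-stream').Duplex;

var d = new Duplex({
  readableHighWaterMark: 1024,
  writableHighWaterMark: 16,
  read: function () {},
  write: function (chunk, encoding, cb) { cb(); }
});

console.log(d.writableHighWaterMark); // 16
console.log(d.readableHighWaterMark); // 1024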
+ this.writelen = 0; this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - // emit prefinish if the only thing we're waiting for is _write cbs + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs // This is relevant for synchronous Transform streams - this.prefinished = false; - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; + this.prefinished = false; // True if the error was already emitted and should not be thrown again - // count buffered requests - this.bufferedRequestCount = 0; + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. - // allocate the first CorkedRequest, there is always + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests + + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); } WritableState.prototype.getBuffer = function getBuffer() { var current = this.bufferedRequest; var out = []; + while (current) { out.push(current); current = current.next; } + return out; }; (function () { try { Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { + get: internalUtil.deprecate(function writableStateBufferGetter() { return this.getBuffer(); }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') }); } catch (_) {} -})(); - -// Test _writableState for inheritance to account for Duplex streams, +})(); // Test _writableState for inheritance to account for Duplex streams, // whose prototype chain only points to Readable. + + var realHasInstance; + if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { realHasInstance = Function.prototype[Symbol.hasInstance]; Object.defineProperty(Writable, Symbol.hasInstance, { - value: function (object) { + value: function value(object) { if (realHasInstance.call(this, object)) return true; if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; } }); } else { - realHasInstance = function (object) { + realHasInstance = function realHasInstance(object) { return object instanceof this; }; } function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, too. + Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too. // `realHasInstance` is necessary because using plain `instanceof` // would return false, as no `_writableState` property is attached. - // Trying to use the custom `instanceof` for Writable here will also break the // Node.js LazyTransform implementation, which has a non-trivial getter for // `_writableState` that would lead to infinite recursion. 
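// Illustrative aside, not part of the patch: the Writable constructor below
// picks up write/writev/destroy/final from its options, so simple sinks do not
// need subclassing. A minimal sketch, assuming readable-stream is required
// directly:
var Writable = require('readable-stream').Writable;

var sink = new Writable({
  write: function (chunk, encoding, cb) {
    console.log('chunk:', chunk.toString());
    cb();
  },
  final: function (cb) {
    console.log('no more writes');
    cb();
  }
});

sink.end('hello');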
- if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { - return new Writable(options); - } + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 - this._writableState = new WritableState(options, this); + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); // legacy. - // legacy. this.writable = true; if (options) { if (typeof options.write === 'function') this._write = options.write; - if (typeof options.writev === 'function') this._writev = options.writev; - if (typeof options.destroy === 'function') this._destroy = options.destroy; - if (typeof options.final === 'function') this._final = options.final; } Stream.call(this); -} +} // Otherwise people can pipe Writable streams, which is just wrong. + -// Otherwise people can pipe Writable streams, which is just wrong. Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe, not readable')); + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); }; function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - pna.nextTick(cb, er); -} + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb -// Checks that a user-supplied chunk is valid, especially for the particular + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} // Checks that a user-supplied chunk is valid, especially for the particular // mode the stream is in. Currently this means that `null` is never accepted // and undefined/non-string values are only allowed in object mode. 
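// Illustrative aside, not part of the patch: the rules described above mean a
// null chunk always errors, and non-string/non-Buffer chunks are only accepted
// in object mode. A minimal sketch, assuming readable-stream is required
// directly:
var Writable = require('readable-stream').Writable;

var w = new Writable({ write: function (chunk, encoding, cb) { cb(); } });
w.on('error', function (err) { console.log(err.code); }); // 'ERR_STREAM_NULL_VALUES'
w.write(null);

var objW = new Writable({
  objectMode: true,
  write: function (chunk, encoding, cb) { cb(); }
});
objW.write({ ok: true }); // allowed in object mode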
+ + function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; + var er; if (chunk === null) { - er = new TypeError('May not write null values to stream'); - } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); } + if (er) { - stream.emit('error', er); - pna.nextTick(cb, er); - valid = false; + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; } - return valid; + + return true; } Writable.prototype.write = function (chunk, encoding, cb) { var state = this._writableState; var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); if (isBuf && !Buffer.isBuffer(chunk)) { @@ -326,21 +301,16 @@ Writable.prototype.write = function (chunk, encoding, cb) { } if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { state.pendingcb++; ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); } - return ret; }; Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; + this._writableState.corked++; }; Writable.prototype.uncork = function () { @@ -348,23 +318,33 @@ Writable.prototype.uncork = function () { if (state.corked) { state.corked--; - - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); } }; Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { // node::ParseEncoding() requires lower case. if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); this._writableState.defaultEncoding = encoding; return this; }; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); + function decodeChunk(state, chunk, encoding) { if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { chunk = Buffer.from(chunk, encoding); } + return chunk; } @@ -373,29 +353,28 @@ Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { // because otherwise some prototype manipulation in // userland will fail enumerable: false, - get: function () { + get: function get() { return this._writableState.highWaterMark; } -}); - -// if we're already writing something, then just put this +}); // if we're already writing something, then just put this // in the queue, and wait our turn. 
Otherwise, call _write // If we return false, then we need a drain event, so set that flag. + function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { if (!isBuf) { var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { isBuf = true; encoding = 'buffer'; chunk = newChunk; } } - var len = state.objectMode ? 1 : chunk.length; + var len = state.objectMode ? 1 : chunk.length; state.length += len; + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. if (!ret) state.needDrain = true; if (state.writing || state.corked) { @@ -407,11 +386,13 @@ function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { callback: cb, next: null }; + if (last) { last.next = state.lastBufferedRequest; } else { state.bufferedRequest = state.lastBufferedRequest; } + state.bufferedRequestCount += 1; } else { doWrite(stream, state, false, len, chunk, encoding, cb); @@ -425,7 +406,7 @@ function doWrite(stream, state, writev, len, chunk, encoding, cb) { state.writecb = cb; state.writing = true; state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); state.sync = false; } @@ -435,20 +416,20 @@ function onwriteError(stream, state, sync, er, cb) { if (sync) { // defer the callback if we are being called synchronously // to avoid piling up things on the stack - pna.nextTick(cb, er); - // this can emit finish, and it will always happen + process.nextTick(cb, er); // this can emit finish, and it will always happen // after error - pna.nextTick(finishMaybe, stream, state); + + process.nextTick(finishMaybe, stream, state); stream._writableState.errorEmitted = true; - stream.emit('error', er); + errorOrDestroy(stream, er); } else { // the caller expect this to happen before if // it is async cb(er); stream._writableState.errorEmitted = true; - stream.emit('error', er); - // this can emit finish, but finish must + errorOrDestroy(stream, er); // this can emit finish, but finish must // always follow error + finishMaybe(stream, state); } } @@ -464,21 +445,18 @@ function onwrite(stream, er) { var state = stream._writableState; var sync = state.sync; var cb = state.writecb; - + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb);else { // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); + var finished = needFinish(state) || stream.destroyed; if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { clearBuffer(stream, state); } if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ + process.nextTick(afterWrite, stream, state, finished, cb); } else { afterWrite(stream, state, finished, cb); } @@ -490,19 +468,19 @@ function afterWrite(stream, state, finished, cb) { state.pendingcb--; cb(); finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't +} // Must force callback to be called on nextTick, so that we don't // emit 'drain' before the write() consumer gets the 'false' return // value, and has a chance to attach a 'drain' listener. 
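// Illustrative aside, not part of the patch: the nextTick rule above exists so
// callers can rely on the usual backpressure pattern, sketched here with a
// hypothetical writeMany() helper:
function writeMany(writable, chunks, done) {
  var i = 0;
  (function writeNext() {
    while (i < chunks.length) {
      // write() returns false once the buffered amount passes highWaterMark;
      // stop here and resume only after 'drain'.
      if (!writable.write(chunks[i++])) {
        writable.once('drain', writeNext);
        return;
      }
    }
    done();
  })();
}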
+ + function onwriteDrain(stream, state) { if (state.length === 0 && state.needDrain) { state.needDrain = false; stream.emit('drain'); } -} +} // if there's something in the buffer waiting, then process it + -// if there's something in the buffer waiting, then process it function clearBuffer(stream, state) { state.bufferProcessing = true; var entry = state.bufferedRequest; @@ -513,29 +491,30 @@ function clearBuffer(stream, state) { var buffer = new Array(l); var holder = state.corkedRequestsFree; holder.entry = entry; - var count = 0; var allBuffers = true; + while (entry) { buffer[count] = entry; if (!entry.isBuf) allBuffers = false; entry = entry.next; count += 1; } - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is almost always async, defer these to save a bit of time + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time // as the hot path ends with doWrite + state.pendingcb++; state.lastBufferedRequest = null; + if (holder.next) { state.corkedRequestsFree = holder.next; holder.next = null; } else { state.corkedRequestsFree = new CorkedRequest(state); } + state.bufferedRequestCount = 0; } else { // Slow case, write chunks one-by-one @@ -544,14 +523,13 @@ function clearBuffer(stream, state) { var encoding = entry.encoding; var cb = entry.callback; var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then // it means that we need to wait until it does. // also, that means that the chunk and cb are currently // being processed, so move the buffer counter past them. + if (state.writing) { break; } @@ -565,7 +543,7 @@ function clearBuffer(stream, state) { } Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('_write() is not implemented')); + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); }; Writable.prototype._writev = null; @@ -582,38 +560,52 @@ Writable.prototype.end = function (chunk, encoding, cb) { encoding = null; } - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks - // .end() fully uncorks if (state.corked) { state.corked = 1; this.uncork(); - } + } // ignore unnecessary end() calls. + - // ignore unnecessary end() calls. 
- if (!state.ending && !state.finished) endWritable(this, state, cb); + if (!state.ending) endWritable(this, state, cb); + return this; }; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + function needFinish(state) { return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; } + function callFinal(stream, state) { stream._final(function (err) { state.pendingcb--; + if (err) { - stream.emit('error', err); + errorOrDestroy(stream, err); } + state.prefinished = true; stream.emit('prefinish'); finishMaybe(stream, state); }); } + function prefinish(stream, state) { if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function') { + if (typeof stream._final === 'function' && !state.destroyed) { state.pendingcb++; state.finalCalled = true; - pna.nextTick(callFinal, stream, state); + process.nextTick(callFinal, stream, state); } else { state.prefinished = true; stream.emit('prefinish'); @@ -623,22 +615,37 @@ function prefinish(stream, state) { function finishMaybe(stream, state) { var need = needFinish(state); + if (need) { prefinish(stream, state); + if (state.pendingcb === 0) { state.finished = true; stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } } } + return need; } function endWritable(stream, state, cb) { state.ending = true; finishMaybe(stream, state); + if (cb) { - if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); } + state.ended = true; stream.writable = false; } @@ -646,42 +653,45 @@ function endWritable(stream, state, cb) { function onCorkedFinish(corkReq, state, err) { var entry = corkReq.entry; corkReq.entry = null; + while (entry) { var cb = entry.callback; state.pendingcb--; cb(err); entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = corkReq; - } else { - state.corkedRequestsFree = corkReq; - } + } // reuse the free corkReq. 
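// Illustrative aside, not part of the patch: with the autoDestroy option
// handled in finishMaybe() above, 'close' follows 'finish' without an explicit
// destroy() call. A minimal sketch, assuming readable-stream is required
// directly:
var Writable = require('readable-stream').Writable;

var w = new Writable({
  autoDestroy: true,
  write: function (chunk, encoding, cb) { cb(); }
});

w.on('finish', function () { console.log('finish'); });
w.on('close', function () { console.log('close'); }); // emitted by the automatic destroy()
w.end('bye');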
+ + + state.corkedRequestsFree.next = corkReq; } Object.defineProperty(Writable.prototype, 'destroyed', { - get: function () { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { if (this._writableState === undefined) { return false; } + return this._writableState.destroyed; }, - set: function (value) { + set: function set(value) { // we ignore the value if the stream // has not been initialized yet if (!this._writableState) { return; - } - - // backward compatibility, the user is explicitly + } // backward compatibility, the user is explicitly // managing destroyed + + this._writableState.destroyed = value; } }); - Writable.prototype.destroy = destroyImpl.destroy; Writable.prototype._undestroy = destroyImpl.undestroy; + Writable.prototype._destroy = function (err, cb) { - this.end(); cb(err); }; \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/BufferList.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/BufferList.js deleted file mode 100644 index aefc68bd90b9c2..00000000000000 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/BufferList.js +++ /dev/null @@ -1,79 +0,0 @@ -'use strict'; - -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - -var Buffer = require('safe-buffer').Buffer; -var util = require('util'); - -function copyBuffer(src, target, offset) { - src.copy(target, offset); -} - -module.exports = function () { - function BufferList() { - _classCallCheck(this, BufferList); - - this.head = null; - this.tail = null; - this.length = 0; - } - - BufferList.prototype.push = function push(v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - }; - - BufferList.prototype.unshift = function unshift(v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - }; - - BufferList.prototype.shift = function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - }; - - BufferList.prototype.clear = function clear() { - this.head = this.tail = null; - this.length = 0; - }; - - BufferList.prototype.join = function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) { - ret += s + p.data; - }return ret; - }; - - BufferList.prototype.concat = function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - if (this.length === 1) return this.head.data; - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; - }; - - return BufferList; -}(); - -if (util && util.inspect && util.inspect.custom) { - module.exports.prototype[util.inspect.custom] = function () { - var obj = util.inspect({ length: this.length }); - return this.constructor.name + ' ' + obj; - }; -} \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 00000000000000..9fb615a2f3bc44 --- 
/dev/null +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,207 @@ +'use strict'; + +var _Object$setPrototypeO; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var finished = require('./end-of-stream'); + +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); + +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} + +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + + if (resolve !== null) { + var data = iter[kStream].read(); // we defer if data is null + // we can be expecting either 'end' or + // 'error' + + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} + +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} + +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} + +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + + next: function next() { + var _this = this; + + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + + if (error !== null) { + return Promise.reject(error); + } + + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
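// Illustrative aside, not part of the patch: the iterator built in this file
// backs Symbol.asyncIterator on readable streams, so they can be consumed with
// for await...of. A minimal sketch, assuming readable-stream is required
// directly and run on a Node.js version with async iteration support:
var Readable = require('readable-stream').Readable;

async function printAll(stream) {
  for await (const chunk of stream) {
    console.log(chunk.toString());
  }
}

printAll(Readable.from(['a', 'b', 'c'])).catch(console.error);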
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + + + var lastPromise = this[kLastPromise]; + var promise; + + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + + promise = new Promise(this[kHandlePromise]); + } + + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); + +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + // returned by next() and store the error + + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + + iterator[kError] = err; + return; + } + + var resolve = iterator[kLastResolve]; + + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; + +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/deps/npm/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 00000000000000..cdea425f19dd96 --- /dev/null +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,210 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } + +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + +var _require = require('buffer'), + Buffer = _require.Buffer; + +var _require2 = require('util'), + inspect = _require2.inspect; + +var custom = inspect && inspect.custom || 'inspect'; + +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} + +module.exports = +/*#__PURE__*/ +function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + + while (p = p.next) { + ret += s + p.data; + } + + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if 
(this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + + return ret; + } // Consumes a specified amount of bytes or characters from the buffered data. + + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } // Consumes a specified amount of characters from the buffered data. + + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Consumes a specified amount of bytes from the buffered data. + + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Make sure the linked list only shows the minimal necessary information. + + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread({}, options, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } + }]); + + return BufferList; +}(); \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/destroy.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/destroy.js index 5a0a0d88cec6f3..3268a16f3b6f23 100644 --- a/deps/npm/node_modules/readable-stream/lib/internal/streams/destroy.js +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -1,11 +1,5 @@ -'use strict'; +'use strict'; // undocumented cb() API, needed for core, not for public API -/**/ - -var pna = require('process-nextick-args'); -/**/ - -// undocumented cb() API, needed for core, not for public API function destroy(err, cb) { var _this = this; @@ -15,38 +9,61 @@ function destroy(err, cb) { if (readableDestroyed || writableDestroyed) { if (cb) { cb(err); - } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { - pna.nextTick(emitErrorNT, this, err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } } - return this; - } - // we set destroyed to true before firing error callbacks in order + return this; + } // we set destroyed to true before firing error callbacks in order // to make it re-entrance safe in case destroy() is called within callbacks + if (this._readableState) { this._readableState.destroyed = true; - } + } // if this is a duplex stream mark the writable part as destroyed as well + - // if this is a duplex stream mark the writable part as destroyed as well if (this._writableState) { this._writableState.destroyed = true; } this._destroy(err || null, function (err) { if (!cb && err) { - pna.nextTick(emitErrorNT, _this, err); - if (_this._writableState) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); } } else if (cb) { + process.nextTick(emitCloseNT, _this); cb(err); + } else { + process.nextTick(emitCloseNT, _this); } }); return this; } +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} + function undestroy() { if (this._readableState) { this._readableState.destroyed = false; @@ -59,6 +76,8 @@ function undestroy() { this._writableState.destroyed = false; this._writableState.ended = false; this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; this._writableState.finished = false; this._writableState.errorEmitted = false; } @@ -68,7 +87,19 @@ function emitErrorNT(self, err) { self.emit('error', err); } +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
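// Illustrative aside, not part of the patch: without autoDestroy the
// errorOrDestroy() helper below keeps the old synchronous emit('error')
// behaviour; with autoDestroy the stream would be destroy()ed instead. A
// minimal sketch of the legacy path, assuming readable-stream is required
// directly:
var Writable = require('readable-stream').Writable;

var w = new Writable({ write: function (chunk, encoding, cb) { cb(); } });
w.on('error', function (err) { console.log(err.code); }); // 'ERR_STREAM_WRITE_AFTER_END'
w.end();
w.write('late'); // plain synchronous emit('error'); the stream is not destroyed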
+ var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} + module.exports = { destroy: destroy, - undestroy: undestroy + undestroy: undestroy, + errorOrDestroy: errorOrDestroy }; \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 00000000000000..831f286d98fa95 --- /dev/null +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,104 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + callback.apply(this, args); + }; +} + +function noop() {} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + + var writableEnded = stream._writableState && stream._writableState.finished; + + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + + var readableEnded = stream._readableState && stream._readableState.endEmitted; + + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + + var onerror = function onerror(err) { + callback.call(stream, err); + }; + + var onclose = function onclose() { + var err; + + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + 
stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} + +module.exports = eos; \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/from-browser.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 00000000000000..a4ce56f3c90f60 --- /dev/null +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/from.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 00000000000000..6c41284416799c --- /dev/null +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,64 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; + +function from(Readable, iterable, opts) { + var iterator; + + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. 
+ + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + + function next() { + return _next2.apply(this, arguments); + } + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; +} + +module.exports = from; \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/pipeline.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 00000000000000..6589909889c585 --- /dev/null +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,97 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var eos; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} + +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; + +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; // request.destroy just do .end - .abort is what we want + + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} + +function call(fn) { + fn(); +} + +function pipe(from, to) { + return from.pipe(to); +} + +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} + +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} + +module.exports = pipeline; \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/lib/internal/streams/state.js b/deps/npm/node_modules/readable-stream/lib/internal/streams/state.js new file mode 
100644 index 00000000000000..19887eb8a9070e --- /dev/null +++ b/deps/npm/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,27 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + + return Math.floor(hwm); + } // Default value + + + return state.objectMode ? 16 : 16 * 1024; +} + +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/deps/npm/node_modules/readable-stream/package.json b/deps/npm/node_modules/readable-stream/package.json index 2afa6fbd81e225..0b0c4bd207ace3 100644 --- a/deps/npm/node_modules/readable-stream/package.json +++ b/deps/npm/node_modules/readable-stream/package.json @@ -1,31 +1,46 @@ { "name": "readable-stream", - "version": "2.3.7", + "version": "3.6.0", "description": "Streams3, a user-land copy of the stream library from Node.js", "main": "readable.js", + "engines": { + "node": ">= 6" + }, "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" }, "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", "assert": "^1.4.0", - "babel-polyfill": "^6.9.1", - "buffer": "^4.9.0", - "lolex": "^2.3.2", - "nyc": "^6.4.0", - "tap": "^0.7.0", - "tape": "^4.8.0" + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" }, "scripts": { - "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js", - "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", "cover": "nyc npm test", - "report": "nyc report --reporter=lcov" + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" }, "repository": { "type": "git", @@ -38,9 +53,10 @@ ], "browser": { "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", "./readable.js": "./readable-browser.js", - "./writable.js": "./writable-browser.js", - "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" }, "nyc": { diff --git 
a/deps/npm/node_modules/readable-stream/passthrough.js b/deps/npm/node_modules/readable-stream/passthrough.js deleted file mode 100644 index ffd791d7ff275a..00000000000000 --- a/deps/npm/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./readable').PassThrough diff --git a/deps/npm/node_modules/readable-stream/readable-browser.js b/deps/npm/node_modules/readable-stream/readable-browser.js index e50372592ee6c6..adbf60de832f9d 100644 --- a/deps/npm/node_modules/readable-stream/readable-browser.js +++ b/deps/npm/node_modules/readable-stream/readable-browser.js @@ -5,3 +5,5 @@ exports.Writable = require('./lib/_stream_writable.js'); exports.Duplex = require('./lib/_stream_duplex.js'); exports.Transform = require('./lib/_stream_transform.js'); exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/deps/npm/node_modules/readable-stream/readable.js b/deps/npm/node_modules/readable-stream/readable.js index ec89ec53306497..9e0ca120ded827 100644 --- a/deps/npm/node_modules/readable-stream/readable.js +++ b/deps/npm/node_modules/readable-stream/readable.js @@ -1,13 +1,8 @@ var Stream = require('stream'); if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream; - exports = module.exports = Stream.Readable; - exports.Readable = Stream.Readable; - exports.Writable = Stream.Writable; - exports.Duplex = Stream.Duplex; - exports.Transform = Stream.Transform; - exports.PassThrough = Stream.PassThrough; - exports.Stream = Stream; + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; } else { exports = module.exports = require('./lib/_stream_readable.js'); exports.Stream = Stream || exports; @@ -16,4 +11,6 @@ if (process.env.READABLE_STREAM === 'disable' && Stream) { exports.Duplex = require('./lib/_stream_duplex.js'); exports.Transform = require('./lib/_stream_transform.js'); exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); } diff --git a/deps/npm/node_modules/readable-stream/transform.js b/deps/npm/node_modules/readable-stream/transform.js deleted file mode 100644 index b1baba26da03dc..00000000000000 --- a/deps/npm/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./readable').Transform diff --git a/deps/npm/node_modules/readable-stream/writable-browser.js b/deps/npm/node_modules/readable-stream/writable-browser.js deleted file mode 100644 index ebdde6a85dcb19..00000000000000 --- a/deps/npm/node_modules/readable-stream/writable-browser.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./lib/_stream_writable.js'); diff --git a/deps/npm/node_modules/readable-stream/writable.js b/deps/npm/node_modules/readable-stream/writable.js deleted file mode 100644 index 3211a6f80d1abc..00000000000000 --- a/deps/npm/node_modules/readable-stream/writable.js +++ /dev/null @@ -1,8 +0,0 @@ -var Stream = require("stream") -var Writable = require("./lib/_stream_writable.js") - -if (process.env.READABLE_STREAM === 'disable') { - module.exports = Stream && Stream.Writable || Writable -} else { - module.exports = Writable -} diff --git a/deps/npm/node_modules/safe-buffer/index.js b/deps/npm/node_modules/safe-buffer/index.js index 
22438dabbbceef..f8d3ec98852f44 100644 --- a/deps/npm/node_modules/safe-buffer/index.js +++ b/deps/npm/node_modules/safe-buffer/index.js @@ -1,3 +1,4 @@ +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ /* eslint-disable node/no-deprecated-api */ var buffer = require('buffer') var Buffer = buffer.Buffer @@ -20,6 +21,8 @@ function SafeBuffer (arg, encodingOrOffset, length) { return Buffer(arg, encodingOrOffset, length) } +SafeBuffer.prototype = Object.create(Buffer.prototype) + // Copy static methods from Buffer copyProps(Buffer, SafeBuffer) diff --git a/deps/npm/node_modules/safe-buffer/package.json b/deps/npm/node_modules/safe-buffer/package.json index 623fbc3f6b0c48..f2869e256477a9 100644 --- a/deps/npm/node_modules/safe-buffer/package.json +++ b/deps/npm/node_modules/safe-buffer/package.json @@ -1,18 +1,18 @@ { "name": "safe-buffer", "description": "Safer Node.js Buffer API", - "version": "5.1.2", + "version": "5.2.1", "author": { "name": "Feross Aboukhadijeh", "email": "feross@feross.org", - "url": "http://feross.org" + "url": "https://feross.org" }, "bugs": { "url": "https://github.com/feross/safe-buffer/issues" }, "devDependencies": { "standard": "*", - "tape": "^4.0.0" + "tape": "^5.0.0" }, "homepage": "https://github.com/feross/safe-buffer", "keywords": [ @@ -33,5 +33,19 @@ }, "scripts": { "test": "standard && tape test/*.js" - } + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] } diff --git a/deps/npm/node_modules/string_decoder/package.json b/deps/npm/node_modules/string_decoder/package.json index 518c3eb9fb1ffb..b2bb141160cad3 100644 --- a/deps/npm/node_modules/string_decoder/package.json +++ b/deps/npm/node_modules/string_decoder/package.json @@ -1,10 +1,13 @@ { "name": "string_decoder", - "version": "1.1.1", + "version": "1.3.0", "description": "The string_decoder module from Node core", "main": "lib/string_decoder.js", + "files": [ + "lib" + ], "dependencies": { - "safe-buffer": "~5.1.0" + "safe-buffer": "~5.2.0" }, "devDependencies": { "babel-polyfill": "^6.23.0", diff --git a/deps/npm/package.json b/deps/npm/package.json index d5f3cf54cf89c6..20b80c7ebe21c7 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "7.21.1", + "version": "7.23.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -53,9 +53,9 @@ "./package.json": "./package.json" }, "dependencies": { - "@npmcli/arborist": "^2.8.2", + "@npmcli/arborist": "^2.8.3", "@npmcli/ci-detect": "^1.2.0", - "@npmcli/config": "^2.2.0", + "@npmcli/config": "^2.3.0", "@npmcli/map-workspaces": "^1.0.4", "@npmcli/package-json": "^1.0.1", "@npmcli/run-script": "^1.8.6", @@ -97,18 +97,19 @@ "node-gyp": "^7.1.2", "nopt": "^5.0.0", "npm-audit-report": "^2.1.5", + "npm-install-checks": "^4.0.0", "npm-package-arg": "^8.1.5", "npm-pick-manifest": "^6.1.1", "npm-profile": "^5.0.3", "npm-registry-fetch": "^11.0.0", "npm-user-validate": "^1.0.1", - "npmlog": "^5.0.0", + "npmlog": "^5.0.1", "opener": "^1.5.2", "pacote": "^11.3.5", "parse-conflict-json": "^1.1.1", "qrcode-terminal": "^0.12.0", "read": "~1.0.7", - "read-package-json": "^4.0.1", + "read-package-json": "^4.1.1", "read-package-json-fast": "^2.0.3", "readdir-scoped-modules": "^1.1.0", "rimraf": "^3.0.2", @@ -167,6 +168,7 @@ "node-gyp", "nopt", "npm-audit-report", + "npm-install-checks", "npm-package-arg", 
"npm-pick-manifest", "npm-profile", diff --git a/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs index 7bf67868a79ecb..4eb5ea3bc5df59 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs @@ -5,6 +5,48 @@ * Make sure to inspect the output below. Do not ignore changes! */ 'use strict' +exports[`test/lib/utils/error-message.js TAP 404 cleans sensitive info from package id > must match snapshot 1`] = ` +Object { + "detail": Array [ + Array [ + "404", + "", + ], + Array [ + "404", + "", + "'http://evil:***@npmjs.org/not-found' is not in this registry.", + ], + Array [ + "404", + "This package name is not valid, because", + "", + ], + Array [ + "404", + " 1. name can only contain URL-friendly characters", + ], + Array [ + "404", + String( + + Note that you can also install from a + ), + ], + Array [ + "404", + "tarball, folder, http url, or git url.", + ], + ], + "summary": Array [ + Array [ + "404", + "not found", + ], + ], +} +` + exports[`test/lib/utils/error-message.js TAP 404 name with error > must match snapshot 1`] = ` Object { "detail": Array [ @@ -15,7 +57,7 @@ Object { Array [ "404", "", - "'node_modules' is not in the npm registry.", + "'node_modules' is not in this registry.", ], Array [ "404", @@ -57,7 +99,7 @@ Object { Array [ "404", "", - "'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' is not in the npm registry.", + "'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' is not in this registry.", ], Array [ "404", @@ -111,7 +153,7 @@ Object { Array [ "404", "", - "'yolo' is not in the npm registry.", + "'yolo' is not in this registry.", ], Array [ "404", diff --git a/deps/npm/test/lib/install.js b/deps/npm/test/lib/install.js index 6412b34c16f251..2cbee02e67b287 100644 --- a/deps/npm/test/lib/install.js +++ b/deps/npm/test/lib/install.js @@ -126,6 +126,146 @@ t.test('should install globally using Arborist', (t) => { }) }) +t.test('npm i -g npm engines check success', (t) => { + const Install = t.mock('../../lib/install.js', { + '../../lib/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function () { + this.reify = () => {} + }, + pacote: { + manifest: () => { + return { + version: '100.100.100', + engines: { + node: '>1', + }, + } + }, + }, + }) + const npm = mockNpm({ + globalDir: 'path/to/node_modules/', + config: { + global: true, + }, + }) + const install = new Install(npm) + install.exec(['npm'], er => { + if (er) + throw er + t.end() + }) +}) + +t.test('npm i -g npm engines check failure', (t) => { + const Install = t.mock('../../lib/install.js', { + pacote: { + manifest: () => { + return { + _id: 'npm@1.2.3', + version: '100.100.100', + engines: { + node: '>1000', + }, + } + }, + }, + }) + const npm = mockNpm({ + globalDir: 'path/to/node_modules/', + config: { + global: true, + }, + }) + const install = new Install(npm) + install.exec(['npm'], er => { + t.match(er, { + message: 'Unsupported engine', + pkgid: 'npm@1.2.3', + current: { + node: process.version, + npm: '100.100.100', + }, + required: { + node: '>1000', + }, + code: 'EBADENGINE', + }) + t.end() + 
}) +}) + +t.test('npm i -g npm engines check failure forced override', (t) => { + const Install = t.mock('../../lib/install.js', { + '../../lib/utils/reify-finish.js': async () => {}, + '@npmcli/arborist': function () { + this.reify = () => {} + }, + pacote: { + manifest: () => { + return { + _id: 'npm@1.2.3', + version: '100.100.100', + engines: { + node: '>1000', + }, + } + }, + }, + }) + const npm = mockNpm({ + globalDir: 'path/to/node_modules/', + config: { + force: true, + global: true, + }, + }) + const install = new Install(npm) + install.exec(['npm'], er => { + if (er) + throw er + t.end() + }) +}) + +t.test('npm i -g npm@version engines check failure', (t) => { + const Install = t.mock('../../lib/install.js', { + pacote: { + manifest: () => { + return { + _id: 'npm@1.2.3', + version: '100.100.100', + engines: { + node: '>1000', + }, + } + }, + }, + }) + const npm = mockNpm({ + globalDir: 'path/to/node_modules/', + config: { + global: true, + }, + }) + const install = new Install(npm) + install.exec(['npm@100'], er => { + t.match(er, { + message: 'Unsupported engine', + pkgid: 'npm@1.2.3', + current: { + node: process.version, + npm: '100.100.100', + }, + required: { + node: '>1000', + }, + code: 'EBADENGINE', + }) + t.end() + }) +}) + t.test('completion to folder', async t => { const Install = t.mock('../../lib/install.js', { '../../lib/utils/reify-finish.js': async () => {}, diff --git a/deps/npm/test/lib/utils/error-message.js b/deps/npm/test/lib/utils/error-message.js index 07328d588759b5..d1c67a95137c44 100644 --- a/deps/npm/test/lib/utils/error-message.js +++ b/deps/npm/test/lib/utils/error-message.js @@ -423,6 +423,14 @@ t.test('404', t => { t.matchSnapshot(errorMessage(er, npm)) t.end() }) + t.test('cleans sensitive info from package id', t => { + const er = Object.assign(new Error('404 not found'), { + pkgid: 'http://evil:password@npmjs.org/not-found', + code: 'E404', + }) + t.matchSnapshot(errorMessage(er, npm)) + t.end() + }) t.end() }) From 81cb14bb589947e1e2e1de7459e549f7309734d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gerhard=20St=C3=B6bich?= <18708370+Flarna@users.noreply.github.com> Date: Fri, 10 Sep 2021 20:31:33 +0200 Subject: [PATCH 58/95] doc: clarify that ObjectWrap requires manual cleanup on shutdown Clarify that ObjectWrap instances are not destroyed on process or worker shutdown and require manual destruction to avoid resource leaks. PR-URL: https://github.com/nodejs/node/pull/40074 Fixes: https://github.com/nodejs/node/issues/38816 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- doc/api/addons.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/api/addons.md b/doc/api/addons.md index b17e86febee671..c2cdee07ba27af 100644 --- a/doc/api/addons.md +++ b/doc/api/addons.md @@ -965,6 +965,10 @@ provided by the underlying V8 JavaScript engine. They are subject to change or removal at any time. They are not documented by Node.js or V8, and they should never be used outside of testing. +During shutdown of the process or worker threads destructors are not called +by the JS engine. Therefore it's the responsibility of the user to track +these objects and ensure proper destruction to avoid resource leaks. 
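The added paragraph above is easiest to act on with an explicit teardown step. A minimal sketch of one possible pattern, assuming a hypothetical addon that exposes a wrapped `MyObject` with an explicit `close()` method (none of these names come from the patch):

```js
// Hypothetical addon exposing a wrapped object with an explicit close().
const { MyObject } = require('./build/Release/addon');

const obj = new MyObject();
try {
  obj.doWork();
} finally {
  // The JS engine will not run the native destructor on process or worker
  // shutdown, so release the native resources explicitly once the object
  // is no longer needed.
  obj.close();
}
```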
+ ### Factory of wrapped objects Alternatively, it is possible to use a factory pattern to avoid explicitly From 31994fbf8e9e7c6f59a78c14d687e82c877ba660 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Tue, 7 Sep 2021 02:44:14 +0800 Subject: [PATCH 59/95] src: register zlib external references for snapshot MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40050 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Juan José Arboleda Reviewed-By: Tobias Nießen Reviewed-By: Michael Dawson Reviewed-By: Khaidi Chu --- src/node_external_reference.h | 1 + src/node_zlib.cc | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 4ebaa8d27feae8..f51999e996fd06 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -81,6 +81,7 @@ class ExternalReferenceRegistry { V(types) \ V(uv) \ V(v8) \ + V(zlib) \ V(worker) #if NODE_HAVE_I18N_SUPPORT diff --git a/src/node_zlib.cc b/src/node_zlib.cc index b8733229b0b06b..ec0a8e90f1cbcf 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -25,6 +25,7 @@ #include "async_wrap-inl.h" #include "env-inl.h" +#include "node_external_reference.h" #include "threadpoolwork-inl.h" #include "util-inl.h" @@ -1266,6 +1267,16 @@ struct MakeClass { env->SetConstructorFunction(target, name, z); } + + static void Make(ExternalReferenceRegistry* registry) { + registry->Register(Stream::New); + registry->Register(Stream::template Write); + registry->Register(Stream::template Write); + registry->Register(Stream::Close); + registry->Register(Stream::Init); + registry->Register(Stream::Params); + registry->Register(Stream::Reset); + } }; void Initialize(Local target, @@ -1283,6 +1294,12 @@ void Initialize(Local target, FIXED_ONE_BYTE_STRING(env->isolate(), ZLIB_VERSION)).Check(); } +void RegisterExternalReferences(ExternalReferenceRegistry* registry) { + MakeClass::Make(registry); + MakeClass::Make(registry); + MakeClass::Make(registry); +} + } // anonymous namespace void DefineZlibConstants(Local target) { @@ -1408,3 +1425,4 @@ void DefineZlibConstants(Local target) { } // namespace node NODE_MODULE_CONTEXT_AWARE_INTERNAL(zlib, node::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(zlib, node::RegisterExternalReferences) From bc9c2ca6af3478ab3a79674dd203bbb69b8c2039 Mon Sep 17 00:00:00 2001 From: shfshanyue Date: Tue, 14 Sep 2021 11:25:35 +0800 Subject: [PATCH 60/95] http: remove CRLF variable PR-URL: https://github.com/nodejs/node/pull/40101 Reviewed-By: Matteo Collina Reviewed-By: Michael Dawson --- lib/_http_common.js | 2 +- lib/_http_outgoing.js | 30 +++++++++++++++--------------- lib/_http_server.js | 19 +++++++++---------- 3 files changed, 25 insertions(+), 26 deletions(-) diff --git a/lib/_http_common.js b/lib/_http_common.js index 642cdea41f68b1..796deeff055767 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -268,7 +268,7 @@ module.exports = { _checkIsHttpToken: checkIsHttpToken, chunkExpression: /(?:^|\W)chunked(?:$|\W)/i, continueExpression: /(?:^|\W)100-continue(?:$|\W)/i, - CRLF: '\r\n', + CRLF: '\r\n', // TODO: Deprecate this. 
freeParser, methods, parsers, diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index c6f68d4329c7da..25fe8fb1ede73f 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -45,9 +45,11 @@ const Stream = require('stream'); const internalUtil = require('internal/util'); const { kOutHeaders, utcDate, kNeedDrain } = require('internal/http'); const { Buffer } = require('buffer'); -const common = require('_http_common'); -const checkIsHttpToken = common._checkIsHttpToken; -const checkInvalidHeaderChar = common._checkInvalidHeaderChar; +const { + _checkIsHttpToken: checkIsHttpToken, + _checkInvalidHeaderChar: checkInvalidHeaderChar, + chunkExpression: RE_TE_CHUNKED, +} = require('_http_common'); const { defaultTriggerAsyncIdScope, symbols: { async_id_symbol } @@ -78,14 +80,12 @@ let debug = require('internal/util/debuglog').debuglog('http', (fn) => { }); const HIGH_WATER_MARK = getDefaultHighWaterMark(); -const { CRLF } = common; const kCorked = Symbol('corked'); const nop = () => {}; const RE_CONN_CLOSE = /(?:^|\W)close(?:$|\W)/i; -const RE_TE_CHUNKED = common.chunkExpression; // isCookieField performs a case-insensitive comparison of a provided string // against the word "cookie." As of V8 6.6 this is faster than handrolling or @@ -417,7 +417,7 @@ function _storeHeader(firstLine, headers) { // Date header if (this.sendDate && !state.date) { - header += 'Date: ' + utcDate() + CRLF; + header += 'Date: ' + utcDate() + '\r\n'; } // Force the connection to close when the response is a 204 No Content or @@ -447,14 +447,14 @@ function _storeHeader(firstLine, headers) { const shouldSendKeepAlive = this.shouldKeepAlive && (state.contLen || this.useChunkedEncodingByDefault || this.agent); if (shouldSendKeepAlive) { - header += 'Connection: keep-alive' + CRLF; + header += 'Connection: keep-alive\r\n'; if (this._keepAliveTimeout && this._defaultKeepAlive) { const timeoutSeconds = MathFloor(this._keepAliveTimeout / 1000); - header += `Keep-Alive: timeout=${timeoutSeconds}${CRLF}`; + header += `Keep-Alive: timeout=${timeoutSeconds}\r\n`; } } else { this._last = true; - header += 'Connection: close' + CRLF; + header += 'Connection: close\r\n'; } } @@ -467,9 +467,9 @@ function _storeHeader(firstLine, headers) { } else if (!state.trailer && !this._removedContLen && typeof this._contentLength === 'number') { - header += 'Content-Length: ' + this._contentLength + CRLF; + header += 'Content-Length: ' + this._contentLength + '\r\n'; } else if (!this._removedTE) { - header += 'Transfer-Encoding: chunked' + CRLF; + header += 'Transfer-Encoding: chunked\r\n'; this.chunkedEncoding = true; } else { // We should only be able to get here if both Content-Length and @@ -487,7 +487,7 @@ function _storeHeader(firstLine, headers) { throw new ERR_HTTP_TRAILER_INVALID(); } - this._header = header + CRLF; + this._header = header + '\r\n'; this._headerSent = false; // Wait until the first body chunk, or close(), is sent to flush, @@ -514,7 +514,7 @@ function processHeader(self, state, key, value, validate) { function storeHeader(self, state, key, value, validate) { if (validate) validateHeaderValue(key, value); - state.header += key + ': ' + value + CRLF; + state.header += key + ': ' + value + '\r\n'; matchHeader(self, state, key, value); } @@ -694,7 +694,7 @@ ObjectDefineProperty(OutgoingMessage.prototype, 'writableNeedDrain', { } }); -const crlf_buf = Buffer.from(CRLF); +const crlf_buf = Buffer.from('\r\n'); OutgoingMessage.prototype.write = function write(chunk, encoding, callback) { if (typeof encoding === 
'function') { callback = encoding; @@ -818,7 +818,7 @@ OutgoingMessage.prototype.addTrailers = function addTrailers(headers) { debug('Trailer "%s" contains invalid characters', field); throw new ERR_INVALID_CHAR('trailer content', field); } - this._trailer += field + ': ' + value + CRLF; + this._trailer += field + ': ' + value + '\r\n'; } }; diff --git a/lib/_http_server.js b/lib/_http_server.js index 11119169d56f2c..3ef1b4ad5d4735 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -37,7 +37,6 @@ const assert = require('internal/assert'); const { parsers, freeParser, - CRLF, continueExpression, chunkExpression, kIncomingMessage, @@ -254,12 +253,12 @@ ServerResponse.prototype.detachSocket = function detachSocket(socket) { }; ServerResponse.prototype.writeContinue = function writeContinue(cb) { - this._writeRaw(`HTTP/1.1 100 Continue${CRLF}${CRLF}`, 'ascii', cb); + this._writeRaw('HTTP/1.1 100 Continue\r\n\r\n', 'ascii', cb); this._sent100 = true; }; ServerResponse.prototype.writeProcessing = function writeProcessing(cb) { - this._writeRaw(`HTTP/1.1 102 Processing${CRLF}${CRLF}`, 'ascii', cb); + this._writeRaw('HTTP/1.1 102 Processing\r\n\r\n', 'ascii', cb); }; ServerResponse.prototype._implicitHeader = function _implicitHeader() { @@ -322,7 +321,7 @@ function writeHead(statusCode, reason, obj) { if (checkInvalidHeaderChar(this.statusMessage)) throw new ERR_INVALID_CHAR('statusMessage'); - const statusLine = `HTTP/1.1 ${statusCode} ${this.statusMessage}${CRLF}`; + const statusLine = `HTTP/1.1 ${statusCode} ${this.statusMessage}\r\n`; if (statusCode === 204 || statusCode === 304 || (statusCode >= 100 && statusCode <= 199)) { @@ -648,16 +647,16 @@ function onParserTimeout(server, socket) { const noop = () => {}; const badRequestResponse = Buffer.from( - `HTTP/1.1 400 ${STATUS_CODES[400]}${CRLF}` + - `Connection: close${CRLF}${CRLF}`, 'ascii' + `HTTP/1.1 400 ${STATUS_CODES[400]}\r\n` + + 'Connection: close\r\n\r\n', 'ascii' ); const requestTimeoutResponse = Buffer.from( - `HTTP/1.1 408 ${STATUS_CODES[408]}${CRLF}` + - `Connection: close${CRLF}${CRLF}`, 'ascii' + `HTTP/1.1 408 ${STATUS_CODES[408]}\r\n` + + 'Connection: close\r\n\r\n', 'ascii' ); const requestHeaderFieldsTooLargeResponse = Buffer.from( - `HTTP/1.1 431 ${STATUS_CODES[431]}${CRLF}` + - `Connection: close${CRLF}${CRLF}`, 'ascii' + `HTTP/1.1 431 ${STATUS_CODES[431]}\r\n` + + 'Connection: close\r\n\r\n', 'ascii' ); function socketOnError(e) { // Ignore further errors From 588257c00a7dfef96582a72c55d1bc14a3c3db97 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 13 Sep 2021 21:00:17 -0700 Subject: [PATCH 61/95] meta: add .mailmap entry for arcanis MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40103 Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Michael Dawson Reviewed-By: Luigi Pinca --- .mailmap | 1 + AUTHORS | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.mailmap b/.mailmap index 80bc179f8ac66a..5f59ca209638bb 100644 --- a/.mailmap +++ b/.mailmap @@ -250,6 +250,7 @@ Leeseean Chiu Luke Bayes Lydia Kats Maciej Małecki +Maël Nison MaleDong Malte-Thorben Bruns Malte-Thorben Bruns diff --git a/AUTHORS b/AUTHORS index a99cea5a6e28f5..ea89b08c378956 100644 --- a/AUTHORS +++ b/AUTHORS @@ -2691,7 +2691,7 @@ Evan Plaice simon3000 Marcos Casagrande Ruwan Geeganage -Maël Nison +Maël Nison Gerson Niño freestraws Daniel Beckert @@ -2941,7 +2941,6 @@ Marek Łabuz Reza Fatahi Priyanka Kore Jan-Philip Gehrcke 
-Maël Nison qualitymanifest Rosen Penev Jeremy Albright From 9c76c699720df354d2c6a3cb8c6cd9d2a9a3f5c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Tue, 14 Sep 2021 15:17:57 +0200 Subject: [PATCH 62/95] deps: patch V8 to 9.3.345.19 Refs: https://github.com/v8/v8/compare/9.3.345.16...9.3.345.19 PR-URL: https://github.com/nodejs/node/pull/40108 Reviewed-By: Richard Lau Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- deps/v8/include/v8-version.h | 2 +- .../js-native-context-specialization.cc | 18 ++++++------- deps/v8/testing/gmock/BUILD.gn | 7 +----- deps/v8/tools/mb/mb.py | 25 +++++++++++++++++-- 4 files changed, 32 insertions(+), 20 deletions(-) diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index 213d8805b86087..30a4182357505d 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -11,7 +11,7 @@ #define V8_MAJOR_VERSION 9 #define V8_MINOR_VERSION 3 #define V8_BUILD_NUMBER 345 -#define V8_PATCH_LEVEL 16 +#define V8_PATCH_LEVEL 19 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/src/compiler/js-native-context-specialization.cc b/deps/v8/src/compiler/js-native-context-specialization.cc index 61fa46e94dfd41..30cab3ae26ac38 100644 --- a/deps/v8/src/compiler/js-native-context-specialization.cc +++ b/deps/v8/src/compiler/js-native-context-specialization.cc @@ -825,6 +825,12 @@ Reduction JSNativeContextSpecialization::ReduceGlobalAccess( return NoChange(); } else if (property_cell_type == PropertyCellType::kUndefined) { return NoChange(); + } else if (property_cell_type == PropertyCellType::kConstantType) { + // We rely on stability further below. + if (property_cell_value.IsHeapObject() && + !property_cell_value.AsHeapObject().map().is_stable()) { + return NoChange(); + } } } else if (access_mode == AccessMode::kHas) { DCHECK_EQ(receiver, lookup_start_object); @@ -943,17 +949,7 @@ Reduction JSNativeContextSpecialization::ReduceGlobalAccess( if (property_cell_value.IsHeapObject()) { MapRef property_cell_value_map = property_cell_value.AsHeapObject().map(); - if (property_cell_value_map.is_stable()) { - dependencies()->DependOnStableMap(property_cell_value_map); - } else { - // The value's map is already unstable. If this store were to go - // through the C++ runtime, it would transition the PropertyCell to - // kMutable. We don't want to change the cell type from generated - // code (to simplify concurrent heap access), however, so we keep - // it as kConstantType and do the store anyways (if the new value's - // map matches). This is safe because it merely prolongs the limbo - // state that we are in already. - } + dependencies()->DependOnStableMap(property_cell_value_map); // Check that the {value} is a HeapObject. value = effect = graph()->NewNode(simplified()->CheckHeapObject(), diff --git a/deps/v8/testing/gmock/BUILD.gn b/deps/v8/testing/gmock/BUILD.gn index de5ae539093257..c20d33511a1c1f 100644 --- a/deps/v8/testing/gmock/BUILD.gn +++ b/deps/v8/testing/gmock/BUILD.gn @@ -15,12 +15,7 @@ source_set("gmock") { "include/gmock/gmock-matchers.h", "include/gmock/gmock.h", ] - deps = [ "//third_party/googletest:gmock" ] - - public_configs = [ - "//third_party/googletest:gmock_config", - "//third_party/googletest:gtest_config", - ] + public_deps = [ "//third_party/googletest:gmock" ] } # The file/directory layout of Google Test is not yet considered stable. 
Until diff --git a/deps/v8/tools/mb/mb.py b/deps/v8/tools/mb/mb.py index 7031ba50dbdb4c..42ed60c7ef644b 100755 --- a/deps/v8/tools/mb/mb.py +++ b/deps/v8/tools/mb/mb.py @@ -53,6 +53,25 @@ def cmp(x, y): # pylint: disable=redefined-builtin return (x > y) - (x < y) +def _v8_builder_fallback(builder, builder_group): + """Fallback to V8 builder names before splitting builder/tester. + + This eases splitting builders and testers on release branches and + can be removed as soon as all builder have been split and all MB configs + exist on all branches. + """ + builders = [builder] + if builder.endswith(' - builder'): + builders.append(builder[:-len(' - builder')]) + elif builder.endswith(' builder'): + builders.append(builder[:-len(' builder')]) + + for builder in builders: + if builder in builder_group: + return builder_group[builder] + return None + + def main(args): mbw = MetaBuildWrapper() return mbw.Main(args) @@ -651,12 +670,14 @@ def ConfigFromArgs(self): raise MBErr('Builder groups name "%s" not found in "%s"' % (self.args.builder_group, self.args.config_file)) - if not self.args.builder in self.builder_groups[self.args.builder_group]: + config = _v8_builder_fallback( + self.args.builder, self.builder_groups[self.args.builder_group]) + + if not config: raise MBErr( 'Builder name "%s" not found under builder_groups[%s] in "%s"' % (self.args.builder, self.args.builder_group, self.args.config_file)) - config = self.builder_groups[self.args.builder_group][self.args.builder] if isinstance(config, dict): if self.args.phase is None: raise MBErr('Must specify a build --phase for %s on %s' % From 886921de3810e2f6d0b14fcb179060995be79f28 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 14 Sep 2021 07:21:06 -0700 Subject: [PATCH 63/95] build: add paths-ignore for build-tarball workflow In GitHub Actions, don't run build-tarball if the only changed files are docs. 
PR-URL: https://github.com/nodejs/node/pull/40109 Reviewed-By: Antoine du Hamel Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- .github/workflows/build-tarball.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 37f4e707561606..0e70aaaa5cd1fd 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -3,12 +3,22 @@ name: Build from tarball on: pull_request: types: [opened, synchronize, reopened, ready_for_review] + paths-ignore: + - '.mailmap' + - '**.md' + - 'AUTHORS' + - 'doc/**' push: branches: - master - main - v[0-9]+.x-staging - v[0-9]+.x + paths-ignore: + - '.mailmap' + - '**.md' + - 'AUTHORS' + - 'doc/**' env: FLAKY_TESTS: dontcare From 9793e7ff08ff9b652a5083cdd85147fac3c1ff1d Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 14 Sep 2021 07:23:34 -0700 Subject: [PATCH 64/95] build: add .mailmap/AUTHORS to path-ignore for test-asan PR-URL: https://github.com/nodejs/node/pull/40109 Reviewed-By: Antoine du Hamel Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- .github/workflows/test-asan.yml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml index 2762a08fa10bc8..8745f5eb4c0f9a 100644 --- a/.github/workflows/test-asan.yml +++ b/.github/workflows/test-asan.yml @@ -1,6 +1,13 @@ name: test-asan on: + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + paths-ignore: + - '.mailmap' + - '**.md' + - 'AUTHORS' + - 'doc/**' push: branches: - master @@ -9,12 +16,9 @@ on: - v[0-9]+.x-staging - v[0-9]+.x paths-ignore: + - '.mailmap' - '**.md' - - 'doc/**' - pull_request: - types: [opened, synchronize, reopened, ready_for_review] - paths-ignore: - - '**.md' + - 'AUTHORS' - 'doc/**' env: From 8d5787a043d10755926ed4465d67f3f246725795 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 14 Sep 2021 07:23:54 -0700 Subject: [PATCH 65/95] build: add .mailmap/AUTHORS to paths-ignore for test-macos PR-URL: https://github.com/nodejs/node/pull/40109 Reviewed-By: Antoine du Hamel Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- .github/workflows/test-macos.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index adcacc9b99187c..a099a25285c278 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -4,7 +4,9 @@ on: pull_request: types: [opened, synchronize, reopened, ready_for_review] paths-ignore: + - '.mailmap' - '**.md' + - 'AUTHORS' - 'doc/**' push: branches: @@ -14,7 +16,9 @@ on: - v[0-9]+.x-staging - v[0-9]+.x paths-ignore: + - '.mailmap' - '**.md' + - 'AUTHORS' - 'doc/**' env: From 95528b284da570a45fe68324a00bd4f7b6f0249c Mon Sep 17 00:00:00 2001 From: Darshan Sen Date: Sat, 4 Sep 2021 19:55:04 +0530 Subject: [PATCH 66/95] src: remove unnecessary comment and add a CHECK in crypto_tls.cc Signed-off-by: Darshan Sen PR-URL: https://github.com/nodejs/node/pull/39991 Reviewed-By: Anna Henningsen --- src/crypto/crypto_tls.cc | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/crypto/crypto_tls.cc b/src/crypto/crypto_tls.cc index 398509bc5cefe6..3b646160800082 100644 --- a/src/crypto/crypto_tls.cc +++ b/src/crypto/crypto_tls.cc @@ -144,9 +144,6 @@ int NewSessionCallback(SSL* s, SSL_SESSION* sess) { return 0; // Serialize session - // TODO(@jasnell): An AllocatedBuffer or BackingStore would be better - 
// here to start eliminating unnecessary uses of Buffer where an ordinary - // Uint8Array would do just fine. Local session = Buffer::New(env, size).FromMaybe(Local()); if (UNLIKELY(session.IsEmpty())) return 0; @@ -154,16 +151,12 @@ int NewSessionCallback(SSL* s, SSL_SESSION* sess) { unsigned char* session_data = reinterpret_cast(Buffer::Data(session)); - memset(session_data, 0, size); - i2d_SSL_SESSION(sess, &session_data); + CHECK_EQ(i2d_SSL_SESSION(sess, &session_data), size); unsigned int session_id_length; const unsigned char* session_id_data = SSL_SESSION_get_id(sess, &session_id_length); - // TODO(@jasnell): An AllocatedBuffer or BackingStore would be better - // here to start eliminating unnecessary uses of Buffer where an ordinary - // Uint8Array would do just fine Local session_id = Buffer::Copy( env, reinterpret_cast(session_id_data), From effdfa91be71e4680887b07370e5b4053f0db1c5 Mon Sep 17 00:00:00 2001 From: Geoffrey Booth Date: Mon, 13 Sep 2021 20:28:02 -0700 Subject: [PATCH 67/95] meta: update GeoffreyBooth email address MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40102 Reviewed-By: Qingyu Deng Reviewed-By: Rich Trott Reviewed-By: Tobias Nießen Reviewed-By: James M Snell --- .mailmap | 2 ++ README.md | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.mailmap b/.mailmap index 5f59ca209638bb..e46d65c78450e7 100644 --- a/.mailmap +++ b/.mailmap @@ -150,6 +150,8 @@ Gabriel de Perthuis Gareth Ellis Garwah Lam garygsc +Geoffrey Booth +Geoffrey Booth Geoffrey Bugaisky Gerhard Stöbich Gibson Fahnestock diff --git a/README.md b/README.md index 185fab03176828..8ee192ed896bee 100644 --- a/README.md +++ b/README.md @@ -331,7 +331,7 @@ For information about the governance of the Node.js project, see * [gengjiawen](https://github.com/gengjiawen) - **Jiawen Geng** <technicalcute@gmail.com> * [GeoffreyBooth](https://github.com/geoffreybooth) - -**Geoffrey Booth** <webmaster@geoffreybooth.com> (he/him) +**Geoffrey Booth** <webadmin@geoffreybooth.com> (he/him) * [gireeshpunathil](https://github.com/gireeshpunathil) - **Gireesh Punathil** <gpunathi@in.ibm.com> (he/him) * [guybedford](https://github.com/guybedford) - From 7fdb12739dd7b7b29baf51474ab2a9a99b36c34a Mon Sep 17 00:00:00 2001 From: Nikita Galkin Date: Wed, 8 Sep 2021 13:00:04 +0300 Subject: [PATCH 68/95] doc: add timeout.close MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40036 Reviewed-By: Antoine du Hamel Reviewed-By: Tobias Nießen Reviewed-By: Michaël Zasso --- doc/api/timers.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/doc/api/timers.md b/doc/api/timers.md index cfbc8f1ead4b71..b5a00619edbc36 100644 --- a/doc/api/timers.md +++ b/doc/api/timers.md @@ -72,6 +72,17 @@ timer is active. Each of the `Timeout` objects returned by these functions export both `timeout.ref()` and `timeout.unref()` functions that can be used to control this default behavior. +### `timeout.close()` + + +> Stability: 3 - Legacy: Use [`clearTimeout()`][] instead. + +* Returns: {Timeout} a reference to `timeout` + +Cancels the timeout. + ### `timeout.hasRef()` + +Do not search modules from global paths like `$HOME/.node_modules` and +`$NODE_PATH`. 
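As a quick illustration of the legacy method documented above, a short usage sketch (the callback here is illustrative only):

```js
const timeout = setTimeout(() => {
  console.log('this never runs');
}, 1000);

// Legacy: cancels the timer and returns the same Timeout object.
timeout.close();

// Preferred, equivalent cancellation:
// clearTimeout(timeout);
```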
+ ### `--no-warnings` /sg, ''); let text = current.textRaw; @@ -491,8 +491,8 @@ function newSection(header, file) { function textJoin(nodes, file) { return nodes.map((node) => { if (node.type === 'linkReference') { - return file.contents.slice(node.position.start.offset, - node.position.end.offset); + return file.value.slice(node.position.start.offset, + node.position.end.offset); } else if (node.type === 'inlineCode') { return `\`${node.value}\``; } else if (node.type === 'strong') { diff --git a/tools/doc/package-lock.json b/tools/doc/package-lock.json index 04202b1fad56c9..4fedbd4e742ccc 100644 --- a/tools/doc/package-lock.json +++ b/tools/doc/package-lock.json @@ -11,52 +11,73 @@ "node-doc-generator": "generate.js" }, "devDependencies": { - "highlight.js": "11.0.1", - "js-yaml": "4.1.0", - "rehype-raw": "5.1.0", - "rehype-stringify": "8.0.0", - "remark-frontmatter": "^3.0.0", - "remark-gfm": "^1.0.0", - "remark-html": "13.0.2", - "remark-parse": "^9.0.0", - "remark-rehype": "8.1.0", - "to-vfile": "7.1.0", - "unified": "9.2.1", - "unist-util-select": "4.0.0", - "unist-util-visit": "3.1.0" + "highlight.js": "^11.2.0", + "js-yaml": "^4.1.0", + "rehype-raw": "^6.1.0", + "rehype-stringify": "^9.0.2", + "remark-frontmatter": "^4.0.0", + "remark-gfm": "^2.0.0", + "remark-html": "^14.0.1", + "remark-parse": "^10.0.0", + "remark-rehype": "^9.0.0", + "to-vfile": "^7.2.2", + "unified": "^10.1.0", + "unist-util-select": "^4.0.0", + "unist-util-visit": "^4.0.0" }, "engines": { "node": ">=14.8.0" } }, + "node_modules/@types/debug": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.7.tgz", + "integrity": "sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==", + "dev": true, + "dependencies": { + "@types/ms": "*" + } + }, "node_modules/@types/hast": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.1.tgz", - "integrity": "sha512-viwwrB+6xGzw+G1eWpF9geV3fnsDgXqHG+cqgiHrvQfDUW5hzhCyV7Sy3UJxhfRFBsgky2SSW33qi/YrIkjX5Q==", + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz", + "integrity": "sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==", "dev": true, "dependencies": { "@types/unist": "*" } }, "node_modules/@types/mdast": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", - "integrity": "sha512-SXPBMnFVQg1s00dlMCc/jCdvPqdE4mXaMMCeRlxLDmTAEoegHT53xKtkDnzDTOcmMHUfcjyf36/YYZ6SxRdnsw==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", + "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", "dev": true, "dependencies": { "@types/unist": "*" } }, + "node_modules/@types/mdurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.2.tgz", + "integrity": "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==", + "dev": true + }, + "node_modules/@types/ms": { + "version": "0.7.31", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz", + "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==", + "dev": true + }, "node_modules/@types/parse5": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz", - "integrity": 
"sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-ARATsLdrGPUnaBvxLhUlnltcMgn7pQG312S8ccdYlnyijabrX9RN/KN/iGj9Am96CoW8e/K9628BA7Bv4XHdrA==", "dev": true }, "node_modules/@types/unist": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.3.tgz", - "integrity": "sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", + "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==", "dev": true }, "node_modules/argparse": { @@ -66,9 +87,9 @@ "dev": true }, "node_modules/bail": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", - "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.1.tgz", + "integrity": "sha512-d5FoTAr2S5DSUPKl85WNm2yUwsINN8eidIdIwsOge2t33DaOfOdSmmsI11jMN3GmALCXaw+Y6HMVHDzePshFAA==", "dev": true, "funding": { "type": "github", @@ -82,9 +103,9 @@ "dev": true }, "node_modules/ccount": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", - "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.0.tgz", + "integrity": "sha512-VOR0NWFYX65n9gELQdcpqsie5L5ihBXuZGAgaPEp/U7IOSjnPMEH6geE+2f6lcekaNEfWzAHS45mPvSo5bqsUA==", "dev": true, "funding": { "type": "github", @@ -92,9 +113,9 @@ } }, "node_modules/character-entities": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", - "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.0.tgz", + "integrity": "sha512-oHqMj3eAuJ77/P5PaIRcqk+C3hdfNwyCD2DAUcD5gyXkegAuF2USC40CEqPscDk4I8FRGMTojGJQkXDsN5QlJA==", "dev": true, "funding": { "type": "github", @@ -102,9 +123,9 @@ } }, "node_modules/character-entities-html4": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-1.1.4.tgz", - "integrity": "sha512-HRcDxZuZqMx3/a+qrzxdBKBPUpxWEq9xw2OPZ3a/174ihfrQKVsFhqtthBInFy1zZ9GgZyFXOatNujm8M+El3g==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.0.0.tgz", + "integrity": "sha512-dwT2xh5ZhUAjyP96k57ilMKoTQyASaw9IAMR9U5c1lCu2RUni6O6jxfpUEdO2RcPT6TJFvr8pqsbami4Jk+2oA==", "dev": true, "funding": { "type": "github", @@ -112,9 +133,9 @@ } }, "node_modules/character-entities-legacy": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", - "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-2.0.0.tgz", + "integrity": 
"sha512-YwaEtEvWLpFa6Wh3uVLrvirA/ahr9fki/NUd/Bd4OR6EdJ8D22hovYQEOUCBfQfcqnC4IAMGMsHXY1eXgL4ZZA==", "dev": true, "funding": { "type": "github", @@ -122,9 +143,9 @@ } }, "node_modules/character-reference-invalid": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", - "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.0.tgz", + "integrity": "sha512-pE3Z15lLRxDzWJy7bBHBopRwfI20sbrMVLQTC7xsPglCHf4Wv1e167OgYAFP78co2XlhojDyAqA+IAJse27//g==", "dev": true, "funding": { "type": "github", @@ -132,9 +153,9 @@ } }, "node_modules/comma-separated-tokens": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", - "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.2.tgz", + "integrity": "sha512-G5yTt3KQN4Yn7Yk4ed73hlZ1evrFKXeUW3086p3PRFNp7m2vIjI6Pg+Kgb+oyzhd9F2qdcoj67+y3SdxL5XWsg==", "dev": true, "funding": { "type": "github", @@ -148,9 +169,9 @@ "dev": true }, "node_modules/debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -165,12 +186,12 @@ } }, "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", "dev": true, "engines": { - "node": ">=10" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -183,9 +204,9 @@ "dev": true }, "node_modules/fault": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/fault/-/fault-1.0.4.tgz", - "integrity": "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.0.tgz", + "integrity": "sha512-JsDj9LFcoC+4ChII1QpXPA7YIaY8zmqPYw7h9j5n7St7a0BBKfNnwEBAUQRBx70o2q4rs+BeSNHk8Exm6xE7fQ==", "dev": true, "dependencies": { "format": "^0.2.0" @@ -205,18 +226,18 @@ } }, "node_modules/hast-to-hyperscript": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", - "integrity": "sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==", + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-10.0.1.tgz", + "integrity": 
"sha512-dhIVGoKCQVewFi+vz3Vt567E4ejMppS1haBRL6TEmeLeJVB1i/FJIIg/e6s1Bwn0g5qtYojHEKvyGA+OZuyifw==", "dev": true, "dependencies": { - "@types/unist": "^2.0.3", - "comma-separated-tokens": "^1.0.0", - "property-information": "^5.3.0", - "space-separated-tokens": "^1.0.0", + "@types/unist": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", "style-to-object": "^0.3.0", - "unist-util-is": "^4.0.0", - "web-namespaces": "^1.0.0" + "unist-util-is": "^5.0.0", + "web-namespaces": "^2.0.0" }, "funding": { "type": "opencollective", @@ -224,17 +245,19 @@ } }, "node_modules/hast-util-from-parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz", - "integrity": "sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.0.tgz", + "integrity": "sha512-m8yhANIAccpU4K6+121KpPP55sSl9/samzQSQGpb0mTExcNh2WlvjtMwSWFhg6uqD4Rr6Nfa8N6TMypQM51rzQ==", "dev": true, "dependencies": { - "@types/parse5": "^5.0.0", - "hastscript": "^6.0.0", - "property-information": "^5.0.0", - "vfile": "^4.0.0", - "vfile-location": "^3.2.0", - "web-namespaces": "^1.0.0" + "@types/hast": "^2.0.0", + "@types/parse5": "^6.0.0", + "@types/unist": "^2.0.0", + "hastscript": "^7.0.0", + "property-information": "^6.0.0", + "vfile": "^5.0.0", + "vfile-location": "^4.0.0", + "web-namespaces": "^2.0.0" }, "funding": { "type": "opencollective", @@ -242,57 +265,49 @@ } }, "node_modules/hast-util-is-element": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz", - "integrity": "sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-2.1.1.tgz", + "integrity": "sha512-ag0fiZfRWsPiR1udvnSbaazJLGv8qd8E+/e3rW8rUZhbKG4HNJmFL4QkEceN+22BgE+uozXY30z/s+2dL6Z++g==", "dev": true, + "dependencies": { + "@types/hast": "^2.0.0", + "@types/unist": "^2.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/hast-util-parse-selector": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", - "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.0.tgz", + "integrity": "sha512-AyjlI2pTAZEOeu7GeBPZhROx0RHBnydkQIXlhnFzDi0qfXTmGUWoCYZtomHbrdrheV4VFUlPcfJ6LMF5T6sQzg==", "dev": true, + "dependencies": { + "@types/hast": "^2.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/hast-util-raw": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.1.0.tgz", - "integrity": "sha512-5FoZLDHBpka20OlZZ4I/+RBw5piVQ8iI1doEvffQhx5CbCyTtP8UCq8Tw6NmTAMtXgsQxmhW7Ly8OdFre5/YMQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.0.tgz", + "integrity": "sha512-K2ofsY59XqrtBNUAkvT2vPdyNPUchjj1Z0FxUOwBadS6R5h9O3LaRZqpukQ+YfgQ/IMy9GGMB/Nlpzpu+cuuMA==", "dev": true, "dependencies": { "@types/hast": "^2.0.0", - "hast-util-from-parse5": 
"^6.0.0", - "hast-util-to-parse5": "^6.0.0", - "html-void-elements": "^1.0.0", + "@types/parse5": "^6.0.0", + "hast-util-from-parse5": "^7.0.0", + "hast-util-to-parse5": "^7.0.0", + "html-void-elements": "^2.0.0", "parse5": "^6.0.0", - "unist-util-position": "^3.0.0", - "unist-util-visit": "^2.0.0", - "vfile": "^4.0.0", - "web-namespaces": "^1.0.0", - "xtend": "^4.0.0", - "zwitch": "^1.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-raw/node_modules/unist-util-visit": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", - "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", - "dev": true, - "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" + "unist-util-position": "^4.0.0", + "unist-util-visit": "^4.0.0", + "vfile": "^5.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" }, "funding": { "type": "opencollective", @@ -300,12 +315,12 @@ } }, "node_modules/hast-util-sanitize": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-3.0.2.tgz", - "integrity": "sha512-+2I0x2ZCAyiZOO/sb4yNLFmdwPBnyJ4PBkVTUMKMqBwYNA+lXSgOmoRXlJFazoyid9QPogRRKgKhVEodv181sA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-4.0.0.tgz", + "integrity": "sha512-pw56+69jq+QSr/coADNvWTmBPDy+XsmwaF5KnUys4/wM1jt/fZdl7GPxhXXXYdXnz3Gj3qMkbUCH2uKjvX0MgQ==", "dev": true, "dependencies": { - "xtend": "^4.0.0" + "@types/hast": "^2.0.0" }, "funding": { "type": "opencollective", @@ -313,21 +328,21 @@ } }, "node_modules/hast-util-to-html": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-7.1.3.tgz", - "integrity": "sha512-yk2+1p3EJTEE9ZEUkgHsUSVhIpCsL/bvT8E5GzmWc+N1Po5gBw+0F8bo7dpxXR0nu0bQVxVZGX2lBGF21CmeDw==", - "dev": true, - "dependencies": { - "ccount": "^1.0.0", - "comma-separated-tokens": "^1.0.0", - "hast-util-is-element": "^1.0.0", - "hast-util-whitespace": "^1.0.0", - "html-void-elements": "^1.0.0", - "property-information": "^5.0.0", - "space-separated-tokens": "^1.0.0", - "stringify-entities": "^3.0.1", - "unist-util-is": "^4.0.0", - "xtend": "^4.0.0" + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-8.0.2.tgz", + "integrity": "sha512-ipLhUTMyyJi9F/LXaNDG9BrRdshP6obCfmUZYbE/+T639IdzqAOkKN4DyrEyID0gbb+rsC3PKf0XlviZwzomhw==", + "dev": true, + "dependencies": { + "@types/hast": "^2.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-is-element": "^2.0.0", + "hast-util-whitespace": "^2.0.0", + "html-void-elements": "^2.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "unist-util-is": "^5.0.0" }, "funding": { "type": "opencollective", @@ -335,16 +350,17 @@ } }, "node_modules/hast-util-to-parse5": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", - "integrity": "sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.0.0.tgz", + "integrity": "sha512-YHiS6aTaZ3N0Q3nxaY/Tj98D6kM8QX5Q8xqgg8G45zR7PvWnPGPP0vcKCgb/moIydEJ/QWczVrX0JODCVeoV7A==", 
"dev": true, "dependencies": { - "hast-to-hyperscript": "^9.0.0", - "property-information": "^5.0.0", - "web-namespaces": "^1.0.0", - "xtend": "^4.0.0", - "zwitch": "^1.0.0" + "@types/hast": "^2.0.0", + "@types/parse5": "^6.0.0", + "hast-to-hyperscript": "^10.0.0", + "property-information": "^6.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" }, "funding": { "type": "opencollective", @@ -352,9 +368,9 @@ } }, "node_modules/hast-util-whitespace": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-1.0.4.tgz", - "integrity": "sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.0.tgz", + "integrity": "sha512-Pkw+xBHuV6xFeJprJe2BBEoDV+AvQySaz3pPDRUs5PNZEMQjpXJJueqrpcHIXxnWTcAGi/UOCgVShlkY6kLoqg==", "dev": true, "funding": { "type": "opencollective", @@ -362,16 +378,16 @@ } }, "node_modules/hastscript": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", - "integrity": "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.0.2.tgz", + "integrity": "sha512-uA8ooUY4ipaBvKcMuPehTAB/YfFLSSzCwFSwT6ltJbocFUKH/GDHLN+tflq7lSRf9H86uOuxOFkh1KgIy3Gg2g==", "dev": true, "dependencies": { "@types/hast": "^2.0.0", - "comma-separated-tokens": "^1.0.0", - "hast-util-parse-selector": "^2.0.0", - "property-information": "^5.0.0", - "space-separated-tokens": "^1.0.0" + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^3.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" }, "funding": { "type": "opencollective", @@ -379,18 +395,18 @@ } }, "node_modules/highlight.js": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.0.1.tgz", - "integrity": "sha512-EqYpWyTF2s8nMfttfBA2yLKPNoZCO33pLS4MnbXQ4hECf1TKujCt1Kq7QAdrio7roL4+CqsfjqwYj4tYgq0pJQ==", + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.2.0.tgz", + "integrity": "sha512-JOySjtOEcyG8s4MLR2MNbLUyaXqUunmSnL2kdV/KuGJOmHZuAR5xC54Ko7goAXBWNhf09Vy3B+U7vR62UZ/0iw==", "dev": true, "engines": { "node": ">=12.0.0" } }, "node_modules/html-void-elements": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz", - "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.0.tgz", + "integrity": "sha512-4OYzQQsBt0G9bJ/nM9/DDsjm4+fVdzAaPJJcWk5QwA3GIAPxQEeOR0rsI8HbDHQz5Gta8pVvGnnTNSbZVEVvkQ==", "dev": true, "funding": { "type": "github", @@ -404,9 +420,9 @@ "dev": true }, "node_modules/is-alphabetical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", - "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.0.tgz", + "integrity": "sha512-5OV8Toyq3oh4eq6sbWTYzlGdnMT/DPI5I0zxUBxjiigQsZycpkKF3kskkao3JyYGuYDHvhgJF+DrjMQp9SX86w==", "dev": true, "funding": { "type": "github", @@ -414,13 +430,13 @@ } }, 
"node_modules/is-alphanumerical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", - "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.0.tgz", + "integrity": "sha512-t+2GlJ+hO9yagJ+jU3+HSh80VKvz/3cG2cxbGGm4S0hjKuhWQXgPVUVOZz3tqZzMjhmphZ+1TIJTlRZRoe6GCQ==", "dev": true, "dependencies": { - "is-alphabetical": "^1.0.0", - "is-decimal": "^1.0.0" + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" }, "funding": { "type": "github", @@ -451,9 +467,9 @@ } }, "node_modules/is-decimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", - "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.0.tgz", + "integrity": "sha512-QfrfjQV0LjoWQ1K1XSoEZkTAzSa14RKVMa5zg3SdAfzEmQzRM4+tbSFWb78creCeA9rNBzaZal92opi1TwPWZw==", "dev": true, "funding": { "type": "github", @@ -461,9 +477,9 @@ } }, "node_modules/is-hexadecimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", - "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.0.tgz", + "integrity": "sha512-vGOtYkiaxwIiR0+Ng/zNId+ZZehGfINwTzdrDqc6iubbnQWhnPuYymOzOKUDqa2cSl59yHnEh2h6MvRLQsyNug==", "dev": true, "funding": { "type": "github", @@ -471,12 +487,15 @@ } }, "node_modules/is-plain-obj": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.0.0.tgz", + "integrity": "sha512-NXRbBtUdBioI73y/HmOhogw/U5msYPC9DAtGkJXeFcFWSFZw0mCUsPxk/snTuJHzNKA8kLBK4rH97RMB1BfCXw==", "dev": true, "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/js-yaml": { @@ -492,9 +511,9 @@ } }, "node_modules/longest-streak": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", - "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.0.0.tgz", + "integrity": "sha512-XhUjWR5CFaQ03JOP+iSDS9koy8T5jfoImCZ4XprElw3BXsSk4MpVYOLw/6LTDKZhO13PlAXnB5gS4MHQTpkSOw==", "dev": true, "funding": { "type": "github", @@ -502,25 +521,24 @@ } }, "node_modules/markdown-table": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", - "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.1.tgz", + "integrity": "sha512-CBbaYXKSGnE1uLRpKA1SWgIRb2PQrpkllNWpZtZe6VojOJ4ysqiq7/2glYcmKsOYN09QgH/HEBX5hIshAeiK6A==", "dev": true, - "dependencies": { - "repeat-string": "^1.0.0" - }, "funding": { "type": "github", "url": 
"https://github.com/sponsors/wooorm" } }, "node_modules/mdast-util-definitions": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz", - "integrity": "sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.0.tgz", + "integrity": "sha512-5hcR7FL2EuZ4q6lLMUK5w4lHT2H3vqL9quPvYZ/Ku5iifrirfMHiGdhxdXMUbUkDmz5I+TYMd7nbaxUhbQkfpQ==", "dev": true, "dependencies": { - "unist-util-visit": "^2.0.0" + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "unist-util-visit": "^3.0.0" }, "funding": { "type": "opencollective", @@ -528,14 +546,14 @@ } }, "node_modules/mdast-util-definitions/node_modules/unist-util-visit": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", - "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-3.1.0.tgz", + "integrity": "sha512-Szoh+R/Ll68QWAyQyZZpQzZQm2UPbxibDvaY8Xc9SUtYgPsDzx5AWSk++UUt2hJuow8mvwR+rG+LQLw+KsuAKA==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^4.0.0" }, "funding": { "type": "opencollective", @@ -543,14 +561,14 @@ } }, "node_modules/mdast-util-find-and-replace": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-1.1.1.tgz", - "integrity": "sha512-9cKl33Y21lyckGzpSmEQnIDjEfeeWelN5s1kUW1LwdB0Fkuq2u+4GdqcGEygYxJE8GVqCl0741bYXHgamfWAZA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.1.0.tgz", + "integrity": "sha512-1w1jbqAd13oU78QPBf5223+xB+37ecNtQ1JElq2feWols5oEYAl+SgNDnOZipe7NfLemoEt362yUS15/wip4mw==", "dev": true, "dependencies": { - "escape-string-regexp": "^4.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^4.0.0" }, "funding": { "type": "opencollective", @@ -558,16 +576,21 @@ } }, "node_modules/mdast-util-from-markdown": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.5.tgz", - "integrity": "sha512-2hkTXtYYnr+NubD/g6KGBS/0mFmBcifAsI0yIWRiRo0PjVs6SSOSOdtzbp6kSGnShDN6G5aWZpKQ2lWRy27mWQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.0.0.tgz", + "integrity": "sha512-uj2G60sb7z1PNOeElFwCC9b/Se/lFXuLhVKFOAY2EHz/VvgbupTQRNXPoZl7rGpXYL6BNZgcgaybrlSWbo7n/g==", "dev": true, "dependencies": { "@types/mdast": "^3.0.0", - "mdast-util-to-string": "^2.0.0", - "micromark": "~2.11.0", - "parse-entities": "^2.0.0", - "unist-util-stringify-position": "^2.0.0" + "@types/unist": "^2.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "parse-entities": "^3.0.0", + "unist-util-stringify-position": "^3.0.0" }, "funding": { "type": "opencollective", @@ -575,12 +598,12 @@ } }, 
"node_modules/mdast-util-frontmatter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-0.2.0.tgz", - "integrity": "sha512-FHKL4w4S5fdt1KjJCwB0178WJ0evnyyQr5kXTM3wrOVpytD0hrkvd+AOOjU9Td8onOejCkmZ+HQRT3CZ3coHHQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-1.0.0.tgz", + "integrity": "sha512-7itKvp0arEVNpCktOET/eLFAYaZ+0cNjVtFtIPxgQ5tV+3i+D4SDDTjTzPWl44LT59PC+xdx+glNTawBdF98Mw==", "dev": true, "dependencies": { - "micromark-extension-frontmatter": "^0.2.0" + "micromark-extension-frontmatter": "^1.0.0" }, "funding": { "type": "opencollective", @@ -588,16 +611,15 @@ } }, "node_modules/mdast-util-gfm": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.2.tgz", - "integrity": "sha512-NNkhDx/qYcuOWB7xHUGWZYVXvjPFFd6afg6/e2g+SV4r9q5XUcCbV4Wfa3DLYIiD+xAEZc6K4MGaE/m0KDcPwQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-1.0.0.tgz", + "integrity": "sha512-JY4qImsTqivQ0Gl3qvdaizCpomFaNrHnjEhNjNNKeNEA5jZHAJDYu1+yO4V9jn4/ti8GrKdAScaT4F71knoxsA==", "dev": true, "dependencies": { - "mdast-util-gfm-autolink-literal": "^0.1.0", - "mdast-util-gfm-strikethrough": "^0.2.0", - "mdast-util-gfm-table": "^0.1.0", - "mdast-util-gfm-task-list-item": "^0.1.0", - "mdast-util-to-markdown": "^0.6.1" + "mdast-util-gfm-autolink-literal": "^1.0.0", + "mdast-util-gfm-strikethrough": "^1.0.0", + "mdast-util-gfm-table": "^1.0.0", + "mdast-util-gfm-task-list-item": "^1.0.0" }, "funding": { "type": "opencollective", @@ -605,14 +627,15 @@ } }, "node_modules/mdast-util-gfm-autolink-literal": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.3.tgz", - "integrity": "sha512-GjmLjWrXg1wqMIO9+ZsRik/s7PLwTaeCHVB7vRxUwLntZc8mzmTsLVr6HW1yLokcnhfURsn5zmSVdi3/xWWu1A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.1.tgz", + "integrity": "sha512-dCUDNYXCytIonTHIUOZXp5S3FWd1XAt6IVH1fBfH6BbUF9U+9m1T9XllfHPvKJCccKNI+0RlYmQJ0rfMTDxEtA==", "dev": true, "dependencies": { - "ccount": "^1.0.0", - "mdast-util-find-and-replace": "^1.1.0", - "micromark": "^2.11.3" + "@types/mdast": "^3.0.0", + "ccount": "^2.0.0", + "mdast-util-find-and-replace": "^2.0.0", + "micromark-util-character": "^1.0.0" }, "funding": { "type": "opencollective", @@ -620,12 +643,13 @@ } }, "node_modules/mdast-util-gfm-strikethrough": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.3.tgz", - "integrity": "sha512-5OQLXpt6qdbttcDG/UxYY7Yjj3e8P7X16LzvpX8pIQPYJ/C2Z1qFGMmcw+1PZMUM3Z8wt8NRfYTvCni93mgsgA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.0.tgz", + "integrity": "sha512-gM9ipBUdRxYa6Yq1Hd8Otg6jEn/dRxFZ1F9ZX4QHosHOexLGqNZO2dh0A+YFbUEd10RcKjnjb4jOfJJzoXXUew==", "dev": true, "dependencies": { - "mdast-util-to-markdown": "^0.6.0" + "@types/mdast": "^3.0.3", + "mdast-util-to-markdown": "^1.0.0" }, "funding": { "type": "opencollective", @@ -633,13 +657,13 @@ } }, "node_modules/mdast-util-gfm-table": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.6.tgz", - "integrity": 
"sha512-j4yDxQ66AJSBwGkbpFEp9uG/LS1tZV3P33fN1gkyRB2LoRL+RR3f76m0HPHaby6F4Z5xr9Fv1URmATlRRUIpRQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.1.tgz", + "integrity": "sha512-NByKuaSg5+M6r9DZBPXFUmhMHGFf9u+WE76EeStN01ghi8hpnydiWBXr+qj0XCRWI7SAMNtEjGvip6zci9axQA==", "dev": true, "dependencies": { - "markdown-table": "^2.0.0", - "mdast-util-to-markdown": "~0.6.0" + "markdown-table": "^3.0.0", + "mdast-util-to-markdown": "^1.0.0" }, "funding": { "type": "opencollective", @@ -647,12 +671,13 @@ } }, "node_modules/mdast-util-gfm-task-list-item": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.6.tgz", - "integrity": "sha512-/d51FFIfPsSmCIRNp7E6pozM9z1GYPIkSy1urQ8s/o4TC22BZ7DqfHFWiqBD23bc7J3vV1Fc9O4QIHBlfuit8A==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.0.tgz", + "integrity": "sha512-dwkzOTjQe8JCCHVE3Cb0pLHTYLudf7t9WCAnb20jI8/dW+VHjgWhjtIUVA3oigNkssgjEwX+i+3XesUdCnXGyA==", "dev": true, "dependencies": { - "mdast-util-to-markdown": "~0.6.0" + "@types/mdast": "^3.0.3", + "mdast-util-to-markdown": "^1.0.0" }, "funding": { "type": "opencollective", @@ -660,34 +685,20 @@ } }, "node_modules/mdast-util-to-hast": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.2.0.tgz", - "integrity": "sha512-JoPBfJ3gBnHZ18icCwHR50orC9kNH81tiR1gs01D8Q5YpV6adHNO9nKNuFBCJQ941/32PT1a63UF/DitmS3amQ==", + "version": "11.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-11.2.1.tgz", + "integrity": "sha512-tvy3qzo/SKxkQ9smt7D7NX+4nAQng+eK4/A7PVUzAT4+n0NtgaDRyZA2DmGExAbW7xUX4O+4jkO2u94dNStssw==", "dev": true, "dependencies": { + "@types/hast": "^2.0.0", "@types/mdast": "^3.0.0", - "@types/unist": "^2.0.0", - "mdast-util-definitions": "^4.0.0", + "@types/mdurl": "^1.0.0", + "mdast-util-definitions": "^5.0.0", "mdurl": "^1.0.0", - "unist-builder": "^2.0.0", - "unist-util-generated": "^1.0.0", - "unist-util-position": "^3.0.0", - "unist-util-visit": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", - "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", - "dev": true, - "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" + "unist-builder": "^3.0.0", + "unist-util-generated": "^2.0.0", + "unist-util-position": "^4.0.0", + "unist-util-visit": "^4.0.0" }, "funding": { "type": "opencollective", @@ -695,17 +706,18 @@ } }, "node_modules/mdast-util-to-markdown": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz", - "integrity": "sha512-XeV9sDE7ZlOQvs45C9UKMtfTcctcaj/pGwH8YLbMHoMOXNNCn2LsqVQOqrF1+/NU8lKDAqozme9SCXWyo9oAcQ==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.2.2.tgz", + "integrity": "sha512-G8/IwiB2clO8tJsw/fBNNilZ8wDKJnnOts0a3Ls6DVKiy+K7wHGfZDMxLrYXW3QuHiNOZiSU1KduL1oBY4MQqQ==", "dev": true, "dependencies": { + "@types/mdast": "^3.0.0", "@types/unist": "^2.0.0", - "longest-streak": "^2.0.0", - 
"mdast-util-to-string": "^2.0.0", - "parse-entities": "^2.0.0", - "repeat-string": "^1.0.0", - "zwitch": "^1.0.0" + "longest-streak": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "parse-entities": "^3.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" }, "funding": { "type": "opencollective", @@ -713,9 +725,9 @@ } }, "node_modules/mdast-util-to-string": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", - "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.1.0.tgz", + "integrity": "sha512-n4Vypz/DZgwo0iMHLQL49dJzlp7YtAJP+N07MZHpjPf/5XJuHUWstviF4Mn2jEiR/GNmtnRRqnwsXExk3igfFA==", "dev": true, "funding": { "type": "opencollective", @@ -729,9 +741,9 @@ "dev": true }, "node_modules/micromark": { - "version": "2.11.4", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", - "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.0.5.tgz", + "integrity": "sha512-QfjERBnPw0G9mxhOCkkbRP0n8SX8lIBLrEKeEVceviUukqVMv3hWE4AgNTOK/W6GWqtPvvIHg2Apl3j1Dxm6aQ==", "dev": true, "funding": [ { @@ -744,17 +756,66 @@ } ], "dependencies": { + "@types/debug": "^4.0.0", "debug": "^4.0.0", - "parse-entities": "^2.0.0" + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "parse-entities": "^3.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.0.1.tgz", + "integrity": "sha512-vEOw8hcQ3nwHkKKNIyP9wBi8M50zjNajtmI+cCUWcVfJS+v5/3WCh4PLKf7PPRZFUutjzl4ZjlHwBWUKfb/SkA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "parse-entities": "^3.0.0" } }, "node_modules/micromark-extension-frontmatter": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-0.2.2.tgz", - "integrity": 
"sha512-q6nPLFCMTLtfsctAuS0Xh4vaolxSFUWUWR6PZSrXXiRy+SANGllpcqdXFv2z07l0Xz/6Hl40hK0ffNCJPH2n1A==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-1.0.0.tgz", + "integrity": "sha512-EXjmRnupoX6yYuUJSQhrQ9ggK0iQtQlpi6xeJzVD5xscyAI+giqco5fdymayZhJMbIFecjnE2yz85S9NzIgQpg==", "dev": true, "dependencies": { - "fault": "^1.0.0" + "fault": "^2.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0" }, "funding": { "type": "opencollective", @@ -762,17 +823,18 @@ } }, "node_modules/micromark-extension-gfm": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.3.tgz", - "integrity": "sha512-oVN4zv5/tAIA+l3GbMi7lWeYpJ14oQyJ3uEim20ktYFAcfX1x3LNlFGGlmrZHt7u9YlKExmyJdDGaTt6cMSR/A==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-1.0.0.tgz", + "integrity": "sha512-OjqbQPL1Vec/4l5hnC8WnMNmWwgrT9JvzR2udqIGrGKecZsdwY9GAWZ5482CuD12SXuHNj8aS8epni6ip0Pwog==", "dev": true, "dependencies": { - "micromark": "~2.11.0", - "micromark-extension-gfm-autolink-literal": "~0.5.0", - "micromark-extension-gfm-strikethrough": "~0.6.5", - "micromark-extension-gfm-table": "~0.4.0", - "micromark-extension-gfm-tagfilter": "~0.3.0", - "micromark-extension-gfm-task-list-item": "~0.3.0" + "micromark-extension-gfm-autolink-literal": "^1.0.0", + "micromark-extension-gfm-strikethrough": "^1.0.0", + "micromark-extension-gfm-table": "^1.0.0", + "micromark-extension-gfm-tagfilter": "^1.0.0", + "micromark-extension-gfm-task-list-item": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-types": "^1.0.0" }, "funding": { "type": "opencollective", @@ -780,12 +842,15 @@ } }, "node_modules/micromark-extension-gfm-autolink-literal": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz", - "integrity": "sha512-ePiDGH0/lhcngCe8FtH4ARFoxKTUelMp4L7Gg2pujYD5CSMb9PbblnyL+AAMud/SNMyusbS2XDSiPIRcQoNFAw==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.0.tgz", + "integrity": "sha512-t+K0aPK32mXypVTEKV+WRfoT/Rb7MERDgHZVRr56NXpyQQhgMk72QnK4NljYUlrgbuesH+MxiPQwThzqRDIwvA==", "dev": true, "dependencies": { - "micromark": "~2.11.3" + "micromark-util-character": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" }, "funding": { "type": "opencollective", @@ -793,12 +858,16 @@ } }, "node_modules/micromark-extension-gfm-strikethrough": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.5.tgz", - "integrity": "sha512-PpOKlgokpQRwUesRwWEp+fHjGGkZEejj83k9gU5iXCbDG+XBA92BqnRKYJdfqfkrRcZRgGuPuXb7DaK/DmxOhw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.1.tgz", + "integrity": "sha512-fzGYXWz9HPWH1uHqYwdyR8XpEtuoYVHUjTdPQTnl3ETVZOQe1NXMwE3RA7AMqeON52hG+kO9g1/P1+pLONBSMQ==", "dev": true, "dependencies": { - "micromark": "~2.11.0" + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" 
}, "funding": { "type": "opencollective", @@ -806,12 +875,15 @@ } }, "node_modules/micromark-extension-gfm-table": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.3.tgz", - "integrity": "sha512-hVGvESPq0fk6ALWtomcwmgLvH8ZSVpcPjzi0AjPclB9FsVRgMtGZkUcpE0zgjOCFAznKepF4z3hX8z6e3HODdA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.0.tgz", + "integrity": "sha512-OATRuHDgEAT/aaJJRSdU12V+s01kNSnJ0jumdfLq5mPy0F5DkR3zbTSFLH4tjVYM0/kEG6umxIhHY62mFe4z5Q==", "dev": true, "dependencies": { - "micromark": "~2.11.0" + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" }, "funding": { "type": "opencollective", @@ -819,272 +891,586 @@ } }, "node_modules/micromark-extension-gfm-tagfilter": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", - "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.0.tgz", + "integrity": "sha512-GGUZhzQrOdHR8RHU2ru6K+4LMlj+pBdNuXRtw5prOflDOk2hHqDB0xEgej1AHJ2VETeycX7tzQh2EmaTUOmSKg==", "dev": true, + "dependencies": { + "micromark-util-types": "^1.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/micromark-extension-gfm-task-list-item": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.3.tgz", - "integrity": "sha512-0zvM5iSLKrc/NQl84pZSjGo66aTGd57C1idmlWmE87lkMcXrTxg1uXa/nXomxJytoje9trP0NDLvw4bZ/Z/XCQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.0.tgz", + "integrity": "sha512-3tkHCq1NNwijtwpjYba9+rl1yvQ4xYg8iQpUAfTJRyq8MtIEsBUF/vW6B9Gh8Qwy1hE2FmpyHhP4jnFAt61zLg==", "dev": true, "dependencies": { - "micromark": "~2.11.0" + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/nth-check": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.0.tgz", - "integrity": "sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q==", + "node_modules/micromark-factory-destination": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz", + "integrity": "sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw==", "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "boolbase": "^1.0.0" 
- }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/parse-entities": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", - "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", + "node_modules/micromark-factory-label": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.0.0.tgz", + "integrity": "sha512-XWEucVZb+qBCe2jmlOnWr6sWSY6NHx+wtpgYFsm4G+dufOf6tTQRRo0bdO7XSlGPu5fyjpJenth6Ksnc5Mwfww==", "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", - "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", - "dev": true - }, - "node_modules/property-information": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", - "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", + "node_modules/micromark-factory-space": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.0.0.tgz", + "integrity": "sha512-qUmqs4kj9a5yBnk3JMLyjtWYN6Mzfcx8uJfi5XAveBniDevmZasdGBba5b4QsvRcAkmvGo5ACmSUmyGiKTLZew==", "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "xtend": "^4.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "micromark-util-character": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/rehype-raw": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-5.1.0.tgz", - "integrity": "sha512-MDvHAb/5mUnif2R+0IPCYJU8WjHa9UzGtM/F4AVy5GixPlDZ1z3HacYy4xojDU+uBa+0X/3PIfyQI26/2ljJNA==", + "node_modules/micromark-factory-title": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.0.0.tgz", + "integrity": "sha512-flvC7Gx0dWVWorXuBl09Cr3wB5FTuYec8pMGVySIp2ZlqTcIjN/lFohZcP0EG//krTptm34kozHk7aK/CleCfA==", "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "hast-util-raw": "^6.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + 
"micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/rehype-stringify": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-8.0.0.tgz", - "integrity": "sha512-VkIs18G0pj2xklyllrPSvdShAV36Ff3yE5PUO9u36f6+2qJFnn22Z5gKwBOwgXviux4UC7K+/j13AnZfPICi/g==", + "node_modules/micromark-factory-whitespace": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.0.0.tgz", + "integrity": "sha512-Qx7uEyahU1lt1RnsECBiuEbfr9INjQTGa6Err+gF3g0Tx4YEviPbqqGKNv/NrBaE7dVHdn1bVZKM/n5I/Bak7A==", "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "hast-util-to-html": "^7.1.1" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/remark-frontmatter": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-3.0.0.tgz", - "integrity": "sha512-mSuDd3svCHs+2PyO29h7iijIZx4plX0fheacJcAoYAASfgzgVIcXGYSq9GFyYocFLftQs8IOmmkgtOovs6d4oA==", + "node_modules/micromark-util-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.1.0.tgz", + "integrity": "sha512-agJ5B3unGNJ9rJvADMJ5ZiYjBRyDpzKAOk01Kpi1TKhlT1APx3XZk6eN7RtSz1erbWHC2L8T3xLZ81wdtGRZzg==", "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "mdast-util-frontmatter": "^0.2.0", - "micromark-extension-frontmatter": "^0.2.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/remark-gfm": { + "node_modules/micromark-util-chunked": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", - "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.0.0.tgz", + "integrity": "sha512-5e8xTis5tEZKgesfbQMKRCyzvffRRUX+lK/y+DvsMFdabAicPkkZV6gO+FEWi9RfuKKoxxPwNL+dFF0SMImc1g==", "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], "dependencies": { - "mdast-util-gfm": "^0.1.0", - "micromark-extension-gfm": "^0.3.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-symbol": "^1.0.0" } }, - "node_modules/remark-html": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-13.0.2.tgz", - "integrity": "sha512-LhSRQ+3RKdBqB/RGesFWkNNfkGqprDUCwjq54SylfFeNyZby5kqOG8Dn/vYsRoM8htab6EWxFXCY6XIZvMoRiQ==", + "node_modules/micromark-util-classify-character": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.0.0.tgz", + 
"integrity": "sha512-F8oW2KKrQRb3vS5ud5HIqBVkCqQi224Nm55o5wYLzY/9PwHGXC01tr3d7+TqHHz6zrKQ72Okwtvm/xQm6OVNZA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.0.0.tgz", + "integrity": "sha512-J8H058vFBdo/6+AsjHp2NF7AJ02SZtWaVUjsayNFeAiydTxUwViQPxN0Hf8dp4FmCQi0UUFovFsEyRSUmFH3MA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.0.0.tgz", + "integrity": "sha512-OzO9AI5VUtrTD7KSdagf4MWgHMtET17Ua1fIpXTpuhclCqD8egFWo85GxSGvxgkGS74bEahvtM0WP0HjvV0e4w==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.0.0.tgz", + "integrity": "sha512-cJpFVM768h6zkd8qJ1LNRrITfY4gwFt+tziPcIf71Ui8yFzY9wG3snZQqiWVq93PG4Sw6YOtcNiKJfVIs9qfGg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-html-tag-name": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.0.0.tgz", + "integrity": "sha512-NenEKIshW2ZI/ERv9HtFNsrn3llSPZtY337LID/24WeLqMzeZhBEE6BQ0vS2ZBjshm5n40chKtJ3qjAbVV8S0g==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.0.0.tgz", + "integrity": "sha512-yg+zrL14bBTFrQ7n35CmByWUTFsgst5JhA4gJYoty4Dqzj4Z4Fr/DHekSS5aLfH9bdlfnSvKAWsAgJhIbogyBg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.0.0.tgz", + "integrity": 
"sha512-CB/AGk98u50k42kvgaMM94wzBqozSzDDaonKU7P7jwQIuH2RU0TeBqGYJz2WY1UdihhjweivStrJ2JdkdEmcfw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.0.0.tgz", + "integrity": "sha512-cCxvBKlmac4rxCGx6ejlIviRaMKZc0fWm5HdCHEeDWRSkn44l6NdYVRyU+0nT1XC72EQJMZV8IPHF+jTr56lAg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.0.0.tgz", + "integrity": "sha512-EsnG2qscmcN5XhkqQBZni/4oQbLFjz9yk3ZM/P8a3YUjwV6+6On2wehr1ALx0MxK3+XXXLTzuBKHDFeDFYRdgQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.0.0.tgz", + "integrity": "sha512-NZA01jHRNCt4KlOROn8/bGi6vvpEmlXld7EHcRH+aYWUfL3Wc8JLUNNlqUMKa0hhz6GrpUWsHtzPmKof57v0gQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.0.1.tgz", + "integrity": "sha512-UT0ylWEEy80RFYzK9pEaugTqaxoD/j0Y9WhHpSyitxd99zjoQz7JJ+iKuhPAgOW2MiPSUAx+c09dcqokeyaROA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/nth-check": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", + "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", "dev": true, "dependencies": { - "hast-util-sanitize": "^3.0.0", - "hast-util-to-html": "^7.0.0", - "mdast-util-to-hast": "^10.0.0" + "boolbase": "^1.0.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "url": "https://github.com/fb55/nth-check?sponsor=1" } }, - "node_modules/remark-parse": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-9.0.0.tgz", - "integrity": 
"sha512-geKatMwSzEXKHuzBNU1z676sGcDcFoChMK38TgdHJNAYfFtsfHDQG7MoJAjs6sgYMqyLduCYWDIWZIxiPeafEw==", + "node_modules/parse-entities": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-3.0.0.tgz", + "integrity": "sha512-AJlcIFDNPEP33KyJLguv0xJc83BNvjxwpuUIcetyXUsLpVXAUCePJ5kIoYtEN2R1ac0cYaRu/vk9dVFkewHQhQ==", "dev": true, "dependencies": { - "mdast-util-from-markdown": "^0.8.0" + "character-entities": "^2.0.0", + "character-entities-legacy": "^2.0.0", + "character-reference-invalid": "^2.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/remark-rehype": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-8.1.0.tgz", - "integrity": "sha512-EbCu9kHgAxKmW1yEYjx3QafMyGY3q8noUbNUI5xyKbaFP89wbhDrKxyIQNukNYthzjNHZu6J7hwFg7hRm1svYA==", + "node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + }, + "node_modules/property-information": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.0.1.tgz", + "integrity": "sha512-F4WUUAF7fMeF4/JUFHNBWDaKDXi2jbvqBW/y6o5wsf3j19wTZ7S60TmtB5HoBhtgw7NKQRMWuz5vk2PR0CygUg==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/rehype-raw": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-6.1.0.tgz", + "integrity": "sha512-12j2UiiYJgZFdjnHDny77NY5BF3eW4Jsl0vtgL1DWdTzcHjPpbhumU+GtPUdivEWwQc8x9OdEuO0oxaGz7Tvyg==", "dev": true, "dependencies": { - "mdast-util-to-hast": "^10.2.0" + "@types/hast": "^2.0.0", + "hast-util-raw": "^7.2.0", + "unified": "^10.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "node_modules/rehype-stringify": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-9.0.2.tgz", + "integrity": "sha512-BuVA6lAEYtOpXO2xuHLohAzz8UNoQAxAqYRqh4QEEtU39Co+P1JBZhw6wXA9hMWp+JLcmrxWH8+UKcNSr443Fw==", "dev": true, - "engines": { - "node": ">=0.10" + "dependencies": { + "@types/hast": "^2.0.0", + "hast-util-to-html": "^8.0.0", + "unified": "^10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/space-separated-tokens": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", - "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==", + "node_modules/remark-frontmatter": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-4.0.0.tgz", + "integrity": "sha512-0J+2czWAS9sz9baJJel4tTUnNhMI7wYgih99Hxhdeq2GpdI1Ctu0iol6zAsWw5xa+jLsZXNiwEnnJAJo3XX3hw==", "dev": true, + "dependencies": { + "@types/mdast": "^3.0.0", + "mdast-util-frontmatter": "^1.0.0", + "micromark-extension-frontmatter": 
"^1.0.0", + "unified": "^10.0.0" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/stringify-entities": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-3.1.0.tgz", - "integrity": "sha512-3FP+jGMmMV/ffZs86MoghGqAoqXAdxLrJP4GUdrDN1aIScYih5tuIO3eF4To5AJZ79KDZ8Fpdy7QJnK8SsL1Vg==", + "node_modules/remark-gfm": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-2.0.0.tgz", + "integrity": "sha512-waIv4Tjcd2CTUDxKRYzuPyIHw1FoX4H2GjXAzXV9PxQWb+dU4fJivd/FZ+nxyzPARrqTjMIkwIwPoWNbpBhjcQ==", "dev": true, "dependencies": { - "character-entities-html4": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "xtend": "^4.0.0" + "@types/mdast": "^3.0.0", + "mdast-util-gfm": "^1.0.0", + "micromark-extension-gfm": "^1.0.0", + "unified": "^10.0.0" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/style-to-object": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", - "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", + "node_modules/remark-html": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-14.0.1.tgz", + "integrity": "sha512-a/x5bTlFrkwYkz43zuJIk0m0IuS5Rx8zLztGwdzmAdUj0Hsi4C4nkJ8gTQRNXY/ET/gMrqQORMMI0arRItq/aQ==", "dev": true, "dependencies": { - "inline-style-parser": "0.1.1" + "@types/mdast": "^3.0.0", + "hast-util-sanitize": "^4.0.0", + "hast-util-to-html": "^8.0.0", + "mdast-util-to-hast": "^11.0.0", + "unified": "^10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/to-vfile": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/to-vfile/-/to-vfile-7.1.0.tgz", - "integrity": "sha512-t1c42ASuWo39ddjh2R+hX5+kbDcc2CmbhaTSJkVavDYMjnFkpq0L4LeF+rcPoDdieGUsFaivSSkmwuFpYzhBZw==", + "node_modules/remark-parse": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.0.tgz", + "integrity": "sha512-07ei47p2Xl7Bqbn9H2VYQYirnAFJPwdMuypdozWsSbnmrkgA2e2sZLZdnDNrrsxR4onmIzH/J6KXqKxCuqHtPQ==", "dev": true, "dependencies": { - "is-buffer": "^2.0.0", - "vfile": "^5.0.0" + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "unified": "^10.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/to-vfile/node_modules/unist-util-stringify-position": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", - "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", + "node_modules/remark-rehype": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-9.0.0.tgz", + "integrity": "sha512-SFA+mPWu45ynFPKeT3h5eNNVAYoMp3wizr3KSKh1IQ9L6dLSyD25/df6/vv8EW8ji3O3dnZGdbLQl592Tn+ydg==", "dev": true, "dependencies": { - "@types/unist": "^2.0.0" + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-to-hast": "^11.0.0", + "unified": "^10.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - 
"node_modules/to-vfile/node_modules/vfile": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.0.1.tgz", - "integrity": "sha512-lbcf0k66x96Syy36HG+nIBFaSD/fAk589q4nETZTr0JW7eRRmrVo1vHwbD8NlHszUM5ICtFSWQ5xHC292hYZ/w==", + "node_modules/space-separated-tokens": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.1.tgz", + "integrity": "sha512-ekwEbFp5aqSPKaqeY1PGrlGQxPNaq+Cnx4+bE2D8sciBQrHpbwoBbawqTN2+6jPs9IdWxxiUcN0K2pkczD3zmw==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/stringify-entities": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.1.tgz", + "integrity": "sha512-gmMQxKXPWIO3NXNSPyWNhlYcBNGpPA/487D+9dLPnU4xBnIrnHdr8cv5rGJOS/1BRxEXRb7uKwg7BA36IWV7xg==", "dev": true, "dependencies": { - "@types/unist": "^2.0.0", - "is-buffer": "^2.0.0", - "unist-util-stringify-position": "^3.0.0", - "vfile-message": "^3.0.0" + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^2.0.0" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/to-vfile/node_modules/vfile-message": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.0.1.tgz", - "integrity": "sha512-gYmSHcZZUEtYpTmaWaFJwsuUD70/rTY4v09COp8TGtOkix6gGxb/a8iTQByIY9ciTk9GwAwIXd/J9OPfM4Bvaw==", + "node_modules/style-to-object": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", + "integrity": "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==", "dev": true, "dependencies": { - "@types/unist": "^2.0.0", - "unist-util-stringify-position": "^3.0.0" + "inline-style-parser": "0.1.1" + } + }, + "node_modules/to-vfile": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/to-vfile/-/to-vfile-7.2.2.tgz", + "integrity": "sha512-7WL+coet3qyaYb5vrVrfLtOUHgNv9E1D5SIsyVKmHKcgZefy77WMQRk7FByqGKNInoHOlY6xkTGymo29AwjUKg==", + "dev": true, + "dependencies": { + "is-buffer": "^2.0.0", + "vfile": "^5.1.0" }, "funding": { "type": "opencollective", @@ -1092,9 +1478,9 @@ } }, "node_modules/trough": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", - "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.0.2.tgz", + "integrity": "sha512-FnHq5sTMxC0sk957wHDzRnemFnNBvt/gSY99HzK8F7UP5WAbvP70yX5bd7CjEQkN+TjdxwI7g7lJ6podqrG2/w==", "dev": true, "funding": { "type": "github", @@ -1102,17 +1488,18 @@ } }, "node_modules/unified": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.1.tgz", - "integrity": "sha512-juWjuI8Z4xFg8pJbnEZ41b5xjGUWGHqXALmBZ3FC3WX0PIx1CZBIIJ6mXbYMcf6Yw4Fi0rFUTA1cdz/BglbOhA==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.0.tgz", + "integrity": "sha512-4U3ru/BRXYYhKbwXV6lU6bufLikoAavTwev89H5UxY8enDFaAT2VXmIXYNm6hb5oHPng/EXr77PVyDFcptbk5g==", "dev": true, "dependencies": { - "bail": "^1.0.0", + "@types/unist": "^2.0.0", + "bail": "^2.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", - "is-plain-obj": "^2.0.0", - "trough": "^1.0.0", - "vfile": "^4.0.0" + 
"is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^5.0.0" }, "funding": { "type": "opencollective", @@ -1120,19 +1507,22 @@ } }, "node_modules/unist-builder": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz", - "integrity": "sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-3.0.0.tgz", + "integrity": "sha512-GFxmfEAa0vi9i5sd0R2kcrI9ks0r82NasRq5QHh2ysGngrc6GiqD5CDf1FjPenY4vApmFASBIIlk/jj5J5YbmQ==", "dev": true, + "dependencies": { + "@types/unist": "^2.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/unist-util-generated": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz", - "integrity": "sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.0.tgz", + "integrity": "sha512-TiWE6DVtVe7Ye2QxOVW9kqybs6cZexNwTwSMVgkfjEReqy/xwGpAXb99OxktoWwmL+Z+Epb0Dn8/GNDYP1wnUw==", "dev": true, "funding": { "type": "opencollective", @@ -1140,9 +1530,9 @@ } }, "node_modules/unist-util-is": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", - "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.1.1.tgz", + "integrity": "sha512-F5CZ68eYzuSvJjGhCLPL3cYx45IxkqXSetCcRgUXtbcm50X2L9oOWQlfUfDdAf+6Pd27YDblBfdtmsThXmwpbQ==", "dev": true, "funding": { "type": "opencollective", @@ -1150,9 +1540,9 @@ } }, "node_modules/unist-util-position": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", - "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.1.tgz", + "integrity": "sha512-mgy/zI9fQ2HlbOtTdr2w9lhVaiFUHWQnZrFF2EUoVOqtAUdzqMtNiD99qA5a1IcjWVR8O6aVYE9u7Z2z1v0SQA==", "dev": true, "funding": { "type": "opencollective", @@ -1175,33 +1565,13 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/unist-util-select/node_modules/unist-util-is": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.1.0.tgz", - "integrity": "sha512-pWspZ+AvTqYbC+xWeRmzGqbcY8Na08Eowlfs2xchWTYot8vBBAq+syrE/LWS0bw1D/JOu4lwzDbEb6Mz13tK+g==", - "dev": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-select/node_modules/zwitch": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.2.tgz", - "integrity": "sha512-JZxotl7SxAJH0j7dN4pxsTV6ZLXoLdGME+PsjkL/DaBrVryK9kTGq06GfKrwcSOqypP+fdXGoCHE36b99fWVoA==", - "dev": true, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, "node_modules/unist-util-stringify-position": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", - "integrity": 
"sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", + "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", "dev": true, "dependencies": { - "@types/unist": "^2.0.2" + "@types/unist": "^2.0.0" }, "funding": { "type": "opencollective", @@ -1209,14 +1579,14 @@ } }, "node_modules/unist-util-visit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-3.1.0.tgz", - "integrity": "sha512-Szoh+R/Ll68QWAyQyZZpQzZQm2UPbxibDvaY8Xc9SUtYgPsDzx5AWSk++UUt2hJuow8mvwR+rG+LQLw+KsuAKA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.0.0.tgz", + "integrity": "sha512-3HWTvrtU10/E7qgPznBfiOyG0TXj9W8c1GSfaI8L9GkaG1pLePiQPZ7E35a0R3ToQ/zcy4Im6aZ9WBgOTnv1MQ==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", "unist-util-is": "^5.0.0", - "unist-util-visit-parents": "^4.0.0" + "unist-util-visit-parents": "^5.0.0" }, "funding": { "type": "opencollective", @@ -1224,33 +1594,23 @@ } }, "node_modules/unist-util-visit-parents": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", - "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-4.1.1.tgz", + "integrity": "sha512-1xAFJXAKpnnJl8G7K5KgU7FY55y3GcLIXqkzUj5QF/QVP7biUm0K0O2oqVkYsdjzJKifYeWn9+o6piAK2hGSHw==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0" + "unist-util-is": "^5.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/unist-util-visit/node_modules/unist-util-is": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.1.0.tgz", - "integrity": "sha512-pWspZ+AvTqYbC+xWeRmzGqbcY8Na08Eowlfs2xchWTYot8vBBAq+syrE/LWS0bw1D/JOu4lwzDbEb6Mz13tK+g==", - "dev": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, "node_modules/unist-util-visit/node_modules/unist-util-visit-parents": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-4.1.1.tgz", - "integrity": "sha512-1xAFJXAKpnnJl8G7K5KgU7FY55y3GcLIXqkzUj5QF/QVP7biUm0K0O2oqVkYsdjzJKifYeWn9+o6piAK2hGSHw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.0.0.tgz", + "integrity": "sha512-CVaLOYPM/EaFTYMytbaju3Tw4QI3DHnHFnL358FkEu0hZOzSm/hqBdVwOQDR60jF5ZzhB1tlZlRH0ll/yekZIQ==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", @@ -1262,15 +1622,15 @@ } }, "node_modules/vfile": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", - "integrity": "sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.1.0.tgz", + "integrity": "sha512-4o7/DJjEaFPYSh0ckv5kcYkJTHQgCKdL8ozMM1jLAxO9ox95IzveDPXCZp08HamdWq8JXTkClDvfAKaeLQeKtg==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", - "unist-util-stringify-position": "^2.0.0", 
- "vfile-message": "^2.0.0" + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" }, "funding": { "type": "opencollective", @@ -1278,23 +1638,27 @@ } }, "node_modules/vfile-location": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz", - "integrity": "sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.0.1.tgz", + "integrity": "sha512-JDxPlTbZrZCQXogGheBHjbRWjESSPEak770XwWPfw5mTc1v1nWGLB/apzZxsx8a0SJVfF8HK8ql8RD308vXRUw==", "dev": true, + "dependencies": { + "@types/unist": "^2.0.0", + "vfile": "^5.0.0" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, "node_modules/vfile-message": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", - "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.0.2.tgz", + "integrity": "sha512-UUjZYIOg9lDRwwiBAuezLIsu9KlXntdxwG+nXnjuQAHvBpcX3x0eN8h+I7TkY5nkCXj+cWVp4ZqebtGBvok8ww==", "dev": true, "dependencies": { "@types/unist": "^2.0.0", - "unist-util-stringify-position": "^2.0.0" + "unist-util-stringify-position": "^3.0.0" }, "funding": { "type": "opencollective", @@ -1302,28 +1666,19 @@ } }, "node_modules/web-namespaces": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz", - "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.0.tgz", + "integrity": "sha512-dE7ELZRVWh0ceQsRgkjLgsAvwTuv3kcjSY/hLjqL0llleUlQBDjE9JkB9FCBY5F2mnFEwiyJoowl8+NVGHe8dw==", "dev": true, "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "dev": true, - "engines": { - "node": ">=0.4" - } - }, "node_modules/zwitch": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", - "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.2.tgz", + "integrity": "sha512-JZxotl7SxAJH0j7dN4pxsTV6ZLXoLdGME+PsjkL/DaBrVryK9kTGq06GfKrwcSOqypP+fdXGoCHE36b99fWVoA==", "dev": true, "funding": { "type": "github", @@ -1332,34 +1687,55 @@ } }, "dependencies": { + "@types/debug": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.7.tgz", + "integrity": "sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==", + "dev": true, + "requires": { + "@types/ms": "*" + } + }, "@types/hast": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.1.tgz", - "integrity": "sha512-viwwrB+6xGzw+G1eWpF9geV3fnsDgXqHG+cqgiHrvQfDUW5hzhCyV7Sy3UJxhfRFBsgky2SSW33qi/YrIkjX5Q==", + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz", + "integrity": 
"sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==", "dev": true, "requires": { "@types/unist": "*" } }, "@types/mdast": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz", - "integrity": "sha512-SXPBMnFVQg1s00dlMCc/jCdvPqdE4mXaMMCeRlxLDmTAEoegHT53xKtkDnzDTOcmMHUfcjyf36/YYZ6SxRdnsw==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz", + "integrity": "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==", "dev": true, "requires": { "@types/unist": "*" } }, + "@types/mdurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.2.tgz", + "integrity": "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==", + "dev": true + }, + "@types/ms": { + "version": "0.7.31", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz", + "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==", + "dev": true + }, "@types/parse5": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz", - "integrity": "sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-ARATsLdrGPUnaBvxLhUlnltcMgn7pQG312S8ccdYlnyijabrX9RN/KN/iGj9Am96CoW8e/K9628BA7Bv4XHdrA==", "dev": true }, "@types/unist": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.3.tgz", - "integrity": "sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz", + "integrity": "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==", "dev": true }, "argparse": { @@ -1369,9 +1745,9 @@ "dev": true }, "bail": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/bail/-/bail-1.0.5.tgz", - "integrity": "sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.1.tgz", + "integrity": "sha512-d5FoTAr2S5DSUPKl85WNm2yUwsINN8eidIdIwsOge2t33DaOfOdSmmsI11jMN3GmALCXaw+Y6HMVHDzePshFAA==", "dev": true }, "boolbase": { @@ -1381,39 +1757,39 @@ "dev": true }, "ccount": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz", - "integrity": "sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.0.tgz", + "integrity": "sha512-VOR0NWFYX65n9gELQdcpqsie5L5ihBXuZGAgaPEp/U7IOSjnPMEH6geE+2f6lcekaNEfWzAHS45mPvSo5bqsUA==", "dev": true }, "character-entities": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz", - "integrity": "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.0.tgz", + "integrity": "sha512-oHqMj3eAuJ77/P5PaIRcqk+C3hdfNwyCD2DAUcD5gyXkegAuF2USC40CEqPscDk4I8FRGMTojGJQkXDsN5QlJA==", "dev": true }, "character-entities-html4": { 
- "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-1.1.4.tgz", - "integrity": "sha512-HRcDxZuZqMx3/a+qrzxdBKBPUpxWEq9xw2OPZ3a/174ihfrQKVsFhqtthBInFy1zZ9GgZyFXOatNujm8M+El3g==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.0.0.tgz", + "integrity": "sha512-dwT2xh5ZhUAjyP96k57ilMKoTQyASaw9IAMR9U5c1lCu2RUni6O6jxfpUEdO2RcPT6TJFvr8pqsbami4Jk+2oA==", "dev": true }, "character-entities-legacy": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", - "integrity": "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-2.0.0.tgz", + "integrity": "sha512-YwaEtEvWLpFa6Wh3uVLrvirA/ahr9fki/NUd/Bd4OR6EdJ8D22hovYQEOUCBfQfcqnC4IAMGMsHXY1eXgL4ZZA==", "dev": true }, "character-reference-invalid": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz", - "integrity": "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.0.tgz", + "integrity": "sha512-pE3Z15lLRxDzWJy7bBHBopRwfI20sbrMVLQTC7xsPglCHf4Wv1e167OgYAFP78co2XlhojDyAqA+IAJse27//g==", "dev": true }, "comma-separated-tokens": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz", - "integrity": "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.2.tgz", + "integrity": "sha512-G5yTt3KQN4Yn7Yk4ed73hlZ1evrFKXeUW3086p3PRFNp7m2vIjI6Pg+Kgb+oyzhd9F2qdcoj67+y3SdxL5XWsg==", "dev": true }, "css-selector-parser": { @@ -1423,18 +1799,18 @@ "dev": true }, "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", "dev": true, "requires": { "ms": "2.1.2" } }, "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", "dev": true }, "extend": { @@ -1444,9 +1820,9 @@ "dev": true }, "fault": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/fault/-/fault-1.0.4.tgz", - "integrity": "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.0.tgz", + "integrity": 
"sha512-JsDj9LFcoC+4ChII1QpXPA7YIaY8zmqPYw7h9j5n7St7a0BBKfNnwEBAUQRBx70o2q4rs+BeSNHk8Exm6xE7fQ==", "dev": true, "requires": { "format": "^0.2.0" @@ -1459,147 +1835,144 @@ "dev": true }, "hast-to-hyperscript": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz", - "integrity": "sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA==", + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-10.0.1.tgz", + "integrity": "sha512-dhIVGoKCQVewFi+vz3Vt567E4ejMppS1haBRL6TEmeLeJVB1i/FJIIg/e6s1Bwn0g5qtYojHEKvyGA+OZuyifw==", "dev": true, "requires": { - "@types/unist": "^2.0.3", - "comma-separated-tokens": "^1.0.0", - "property-information": "^5.3.0", - "space-separated-tokens": "^1.0.0", + "@types/unist": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", "style-to-object": "^0.3.0", - "unist-util-is": "^4.0.0", - "web-namespaces": "^1.0.0" + "unist-util-is": "^5.0.0", + "web-namespaces": "^2.0.0" } }, "hast-util-from-parse5": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz", - "integrity": "sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.0.tgz", + "integrity": "sha512-m8yhANIAccpU4K6+121KpPP55sSl9/samzQSQGpb0mTExcNh2WlvjtMwSWFhg6uqD4Rr6Nfa8N6TMypQM51rzQ==", "dev": true, "requires": { - "@types/parse5": "^5.0.0", - "hastscript": "^6.0.0", - "property-information": "^5.0.0", - "vfile": "^4.0.0", - "vfile-location": "^3.2.0", - "web-namespaces": "^1.0.0" + "@types/hast": "^2.0.0", + "@types/parse5": "^6.0.0", + "@types/unist": "^2.0.0", + "hastscript": "^7.0.0", + "property-information": "^6.0.0", + "vfile": "^5.0.0", + "vfile-location": "^4.0.0", + "web-namespaces": "^2.0.0" } }, "hast-util-is-element": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz", - "integrity": "sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ==", - "dev": true + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-2.1.1.tgz", + "integrity": "sha512-ag0fiZfRWsPiR1udvnSbaazJLGv8qd8E+/e3rW8rUZhbKG4HNJmFL4QkEceN+22BgE+uozXY30z/s+2dL6Z++g==", + "dev": true, + "requires": { + "@types/hast": "^2.0.0", + "@types/unist": "^2.0.0" + } }, "hast-util-parse-selector": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz", - "integrity": "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==", - "dev": true + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.0.tgz", + "integrity": "sha512-AyjlI2pTAZEOeu7GeBPZhROx0RHBnydkQIXlhnFzDi0qfXTmGUWoCYZtomHbrdrheV4VFUlPcfJ6LMF5T6sQzg==", + "dev": true, + "requires": { + "@types/hast": "^2.0.0" + } }, "hast-util-raw": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.1.0.tgz", - "integrity": "sha512-5FoZLDHBpka20OlZZ4I/+RBw5piVQ8iI1doEvffQhx5CbCyTtP8UCq8Tw6NmTAMtXgsQxmhW7Ly8OdFre5/YMQ==", + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.0.tgz", + "integrity": "sha512-K2ofsY59XqrtBNUAkvT2vPdyNPUchjj1Z0FxUOwBadS6R5h9O3LaRZqpukQ+YfgQ/IMy9GGMB/Nlpzpu+cuuMA==", "dev": true, "requires": { "@types/hast": "^2.0.0", - "hast-util-from-parse5": "^6.0.0", - "hast-util-to-parse5": "^6.0.0", - "html-void-elements": "^1.0.0", + "@types/parse5": "^6.0.0", + "hast-util-from-parse5": "^7.0.0", + "hast-util-to-parse5": "^7.0.0", + "html-void-elements": "^2.0.0", "parse5": "^6.0.0", - "unist-util-position": "^3.0.0", - "unist-util-visit": "^2.0.0", - "vfile": "^4.0.0", - "web-namespaces": "^1.0.0", - "xtend": "^4.0.0", - "zwitch": "^1.0.0" - }, - "dependencies": { - "unist-util-visit": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", - "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", - "dev": true, - "requires": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" - } - } + "unist-util-position": "^4.0.0", + "unist-util-visit": "^4.0.0", + "vfile": "^5.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" } }, "hast-util-sanitize": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-3.0.2.tgz", - "integrity": "sha512-+2I0x2ZCAyiZOO/sb4yNLFmdwPBnyJ4PBkVTUMKMqBwYNA+lXSgOmoRXlJFazoyid9QPogRRKgKhVEodv181sA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-sanitize/-/hast-util-sanitize-4.0.0.tgz", + "integrity": "sha512-pw56+69jq+QSr/coADNvWTmBPDy+XsmwaF5KnUys4/wM1jt/fZdl7GPxhXXXYdXnz3Gj3qMkbUCH2uKjvX0MgQ==", "dev": true, "requires": { - "xtend": "^4.0.0" + "@types/hast": "^2.0.0" } }, "hast-util-to-html": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-7.1.3.tgz", - "integrity": "sha512-yk2+1p3EJTEE9ZEUkgHsUSVhIpCsL/bvT8E5GzmWc+N1Po5gBw+0F8bo7dpxXR0nu0bQVxVZGX2lBGF21CmeDw==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-8.0.2.tgz", + "integrity": "sha512-ipLhUTMyyJi9F/LXaNDG9BrRdshP6obCfmUZYbE/+T639IdzqAOkKN4DyrEyID0gbb+rsC3PKf0XlviZwzomhw==", "dev": true, "requires": { - "ccount": "^1.0.0", - "comma-separated-tokens": "^1.0.0", - "hast-util-is-element": "^1.0.0", - "hast-util-whitespace": "^1.0.0", - "html-void-elements": "^1.0.0", - "property-information": "^5.0.0", - "space-separated-tokens": "^1.0.0", - "stringify-entities": "^3.0.1", - "unist-util-is": "^4.0.0", - "xtend": "^4.0.0" + "@types/hast": "^2.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-is-element": "^2.0.0", + "hast-util-whitespace": "^2.0.0", + "html-void-elements": "^2.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "unist-util-is": "^5.0.0" } }, "hast-util-to-parse5": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", - "integrity": "sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.0.0.tgz", + "integrity": "sha512-YHiS6aTaZ3N0Q3nxaY/Tj98D6kM8QX5Q8xqgg8G45zR7PvWnPGPP0vcKCgb/moIydEJ/QWczVrX0JODCVeoV7A==", "dev": true, "requires": { - "hast-to-hyperscript": "^9.0.0", - "property-information": "^5.0.0", - "web-namespaces": "^1.0.0", - 
"xtend": "^4.0.0", - "zwitch": "^1.0.0" + "@types/hast": "^2.0.0", + "@types/parse5": "^6.0.0", + "hast-to-hyperscript": "^10.0.0", + "property-information": "^6.0.0", + "web-namespaces": "^2.0.0", + "zwitch": "^2.0.0" } }, "hast-util-whitespace": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-1.0.4.tgz", - "integrity": "sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.0.tgz", + "integrity": "sha512-Pkw+xBHuV6xFeJprJe2BBEoDV+AvQySaz3pPDRUs5PNZEMQjpXJJueqrpcHIXxnWTcAGi/UOCgVShlkY6kLoqg==", "dev": true }, "hastscript": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", - "integrity": "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.0.2.tgz", + "integrity": "sha512-uA8ooUY4ipaBvKcMuPehTAB/YfFLSSzCwFSwT6ltJbocFUKH/GDHLN+tflq7lSRf9H86uOuxOFkh1KgIy3Gg2g==", "dev": true, "requires": { "@types/hast": "^2.0.0", - "comma-separated-tokens": "^1.0.0", - "hast-util-parse-selector": "^2.0.0", - "property-information": "^5.0.0", - "space-separated-tokens": "^1.0.0" + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^3.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" } }, "highlight.js": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.0.1.tgz", - "integrity": "sha512-EqYpWyTF2s8nMfttfBA2yLKPNoZCO33pLS4MnbXQ4hECf1TKujCt1Kq7QAdrio7roL4+CqsfjqwYj4tYgq0pJQ==", + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.2.0.tgz", + "integrity": "sha512-JOySjtOEcyG8s4MLR2MNbLUyaXqUunmSnL2kdV/KuGJOmHZuAR5xC54Ko7goAXBWNhf09Vy3B+U7vR62UZ/0iw==", "dev": true }, "html-void-elements": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz", - "integrity": "sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.0.tgz", + "integrity": "sha512-4OYzQQsBt0G9bJ/nM9/DDsjm4+fVdzAaPJJcWk5QwA3GIAPxQEeOR0rsI8HbDHQz5Gta8pVvGnnTNSbZVEVvkQ==", "dev": true }, "inline-style-parser": { @@ -1609,19 +1982,19 @@ "dev": true }, "is-alphabetical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz", - "integrity": "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.0.tgz", + "integrity": "sha512-5OV8Toyq3oh4eq6sbWTYzlGdnMT/DPI5I0zxUBxjiigQsZycpkKF3kskkao3JyYGuYDHvhgJF+DrjMQp9SX86w==", "dev": true }, "is-alphanumerical": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz", - "integrity": "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.0.tgz", + "integrity": "sha512-t+2GlJ+hO9yagJ+jU3+HSh80VKvz/3cG2cxbGGm4S0hjKuhWQXgPVUVOZz3tqZzMjhmphZ+1TIJTlRZRoe6GCQ==", "dev": true, 
"requires": { - "is-alphabetical": "^1.0.0", - "is-decimal": "^1.0.0" + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" } }, "is-buffer": { @@ -1631,21 +2004,21 @@ "dev": true }, "is-decimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz", - "integrity": "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.0.tgz", + "integrity": "sha512-QfrfjQV0LjoWQ1K1XSoEZkTAzSa14RKVMa5zg3SdAfzEmQzRM4+tbSFWb78creCeA9rNBzaZal92opi1TwPWZw==", "dev": true }, "is-hexadecimal": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz", - "integrity": "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.0.tgz", + "integrity": "sha512-vGOtYkiaxwIiR0+Ng/zNId+ZZehGfINwTzdrDqc6iubbnQWhnPuYymOzOKUDqa2cSl59yHnEh2h6MvRLQsyNug==", "dev": true }, "is-plain-obj": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", - "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.0.0.tgz", + "integrity": "sha512-NXRbBtUdBioI73y/HmOhogw/U5msYPC9DAtGkJXeFcFWSFZw0mCUsPxk/snTuJHzNKA8kLBK4rH97RMB1BfCXw==", "dev": true }, "js-yaml": { @@ -1658,174 +2031,169 @@ } }, "longest-streak": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", - "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.0.0.tgz", + "integrity": "sha512-XhUjWR5CFaQ03JOP+iSDS9koy8T5jfoImCZ4XprElw3BXsSk4MpVYOLw/6LTDKZhO13PlAXnB5gS4MHQTpkSOw==", "dev": true }, "markdown-table": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", - "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", - "dev": true, - "requires": { - "repeat-string": "^1.0.0" - } + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.1.tgz", + "integrity": "sha512-CBbaYXKSGnE1uLRpKA1SWgIRb2PQrpkllNWpZtZe6VojOJ4ysqiq7/2glYcmKsOYN09QgH/HEBX5hIshAeiK6A==", + "dev": true }, "mdast-util-definitions": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz", - "integrity": "sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.0.tgz", + "integrity": "sha512-5hcR7FL2EuZ4q6lLMUK5w4lHT2H3vqL9quPvYZ/Ku5iifrirfMHiGdhxdXMUbUkDmz5I+TYMd7nbaxUhbQkfpQ==", "dev": true, "requires": { - "unist-util-visit": "^2.0.0" + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "unist-util-visit": "^3.0.0" }, "dependencies": { "unist-util-visit": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", - "integrity": 
"sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-3.1.0.tgz", + "integrity": "sha512-Szoh+R/Ll68QWAyQyZZpQzZQm2UPbxibDvaY8Xc9SUtYgPsDzx5AWSk++UUt2hJuow8mvwR+rG+LQLw+KsuAKA==", "dev": true, "requires": { "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^4.0.0" } } } }, "mdast-util-find-and-replace": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-1.1.1.tgz", - "integrity": "sha512-9cKl33Y21lyckGzpSmEQnIDjEfeeWelN5s1kUW1LwdB0Fkuq2u+4GdqcGEygYxJE8GVqCl0741bYXHgamfWAZA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.1.0.tgz", + "integrity": "sha512-1w1jbqAd13oU78QPBf5223+xB+37ecNtQ1JElq2feWols5oEYAl+SgNDnOZipe7NfLemoEt362yUS15/wip4mw==", "dev": true, "requires": { - "escape-string-regexp": "^4.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^4.0.0" } }, "mdast-util-from-markdown": { - "version": "0.8.5", - "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-0.8.5.tgz", - "integrity": "sha512-2hkTXtYYnr+NubD/g6KGBS/0mFmBcifAsI0yIWRiRo0PjVs6SSOSOdtzbp6kSGnShDN6G5aWZpKQ2lWRy27mWQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.0.0.tgz", + "integrity": "sha512-uj2G60sb7z1PNOeElFwCC9b/Se/lFXuLhVKFOAY2EHz/VvgbupTQRNXPoZl7rGpXYL6BNZgcgaybrlSWbo7n/g==", "dev": true, "requires": { "@types/mdast": "^3.0.0", - "mdast-util-to-string": "^2.0.0", - "micromark": "~2.11.0", - "parse-entities": "^2.0.0", - "unist-util-stringify-position": "^2.0.0" + "@types/unist": "^2.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "parse-entities": "^3.0.0", + "unist-util-stringify-position": "^3.0.0" } }, "mdast-util-frontmatter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-0.2.0.tgz", - "integrity": "sha512-FHKL4w4S5fdt1KjJCwB0178WJ0evnyyQr5kXTM3wrOVpytD0hrkvd+AOOjU9Td8onOejCkmZ+HQRT3CZ3coHHQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-1.0.0.tgz", + "integrity": "sha512-7itKvp0arEVNpCktOET/eLFAYaZ+0cNjVtFtIPxgQ5tV+3i+D4SDDTjTzPWl44LT59PC+xdx+glNTawBdF98Mw==", "dev": true, "requires": { - "micromark-extension-frontmatter": "^0.2.0" + "micromark-extension-frontmatter": "^1.0.0" } }, "mdast-util-gfm": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.2.tgz", - "integrity": "sha512-NNkhDx/qYcuOWB7xHUGWZYVXvjPFFd6afg6/e2g+SV4r9q5XUcCbV4Wfa3DLYIiD+xAEZc6K4MGaE/m0KDcPwQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-1.0.0.tgz", + "integrity": "sha512-JY4qImsTqivQ0Gl3qvdaizCpomFaNrHnjEhNjNNKeNEA5jZHAJDYu1+yO4V9jn4/ti8GrKdAScaT4F71knoxsA==", "dev": true, "requires": { - "mdast-util-gfm-autolink-literal": "^0.1.0", - "mdast-util-gfm-strikethrough": 
"^0.2.0", - "mdast-util-gfm-table": "^0.1.0", - "mdast-util-gfm-task-list-item": "^0.1.0", - "mdast-util-to-markdown": "^0.6.1" + "mdast-util-gfm-autolink-literal": "^1.0.0", + "mdast-util-gfm-strikethrough": "^1.0.0", + "mdast-util-gfm-table": "^1.0.0", + "mdast-util-gfm-task-list-item": "^1.0.0" } }, "mdast-util-gfm-autolink-literal": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.3.tgz", - "integrity": "sha512-GjmLjWrXg1wqMIO9+ZsRik/s7PLwTaeCHVB7vRxUwLntZc8mzmTsLVr6HW1yLokcnhfURsn5zmSVdi3/xWWu1A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.1.tgz", + "integrity": "sha512-dCUDNYXCytIonTHIUOZXp5S3FWd1XAt6IVH1fBfH6BbUF9U+9m1T9XllfHPvKJCccKNI+0RlYmQJ0rfMTDxEtA==", "dev": true, "requires": { - "ccount": "^1.0.0", - "mdast-util-find-and-replace": "^1.1.0", - "micromark": "^2.11.3" + "@types/mdast": "^3.0.0", + "ccount": "^2.0.0", + "mdast-util-find-and-replace": "^2.0.0", + "micromark-util-character": "^1.0.0" } }, "mdast-util-gfm-strikethrough": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.3.tgz", - "integrity": "sha512-5OQLXpt6qdbttcDG/UxYY7Yjj3e8P7X16LzvpX8pIQPYJ/C2Z1qFGMmcw+1PZMUM3Z8wt8NRfYTvCni93mgsgA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.0.tgz", + "integrity": "sha512-gM9ipBUdRxYa6Yq1Hd8Otg6jEn/dRxFZ1F9ZX4QHosHOexLGqNZO2dh0A+YFbUEd10RcKjnjb4jOfJJzoXXUew==", "dev": true, "requires": { - "mdast-util-to-markdown": "^0.6.0" + "@types/mdast": "^3.0.3", + "mdast-util-to-markdown": "^1.0.0" } }, "mdast-util-gfm-table": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.6.tgz", - "integrity": "sha512-j4yDxQ66AJSBwGkbpFEp9uG/LS1tZV3P33fN1gkyRB2LoRL+RR3f76m0HPHaby6F4Z5xr9Fv1URmATlRRUIpRQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.1.tgz", + "integrity": "sha512-NByKuaSg5+M6r9DZBPXFUmhMHGFf9u+WE76EeStN01ghi8hpnydiWBXr+qj0XCRWI7SAMNtEjGvip6zci9axQA==", "dev": true, "requires": { - "markdown-table": "^2.0.0", - "mdast-util-to-markdown": "~0.6.0" + "markdown-table": "^3.0.0", + "mdast-util-to-markdown": "^1.0.0" } }, "mdast-util-gfm-task-list-item": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.6.tgz", - "integrity": "sha512-/d51FFIfPsSmCIRNp7E6pozM9z1GYPIkSy1urQ8s/o4TC22BZ7DqfHFWiqBD23bc7J3vV1Fc9O4QIHBlfuit8A==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.0.tgz", + "integrity": "sha512-dwkzOTjQe8JCCHVE3Cb0pLHTYLudf7t9WCAnb20jI8/dW+VHjgWhjtIUVA3oigNkssgjEwX+i+3XesUdCnXGyA==", "dev": true, "requires": { - "mdast-util-to-markdown": "~0.6.0" + "@types/mdast": "^3.0.3", + "mdast-util-to-markdown": "^1.0.0" } }, "mdast-util-to-hast": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.2.0.tgz", - "integrity": "sha512-JoPBfJ3gBnHZ18icCwHR50orC9kNH81tiR1gs01D8Q5YpV6adHNO9nKNuFBCJQ941/32PT1a63UF/DitmS3amQ==", + "version": "11.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-11.2.1.tgz", + "integrity": 
"sha512-tvy3qzo/SKxkQ9smt7D7NX+4nAQng+eK4/A7PVUzAT4+n0NtgaDRyZA2DmGExAbW7xUX4O+4jkO2u94dNStssw==", "dev": true, "requires": { + "@types/hast": "^2.0.0", "@types/mdast": "^3.0.0", - "@types/unist": "^2.0.0", - "mdast-util-definitions": "^4.0.0", + "@types/mdurl": "^1.0.0", + "mdast-util-definitions": "^5.0.0", "mdurl": "^1.0.0", - "unist-builder": "^2.0.0", - "unist-util-generated": "^1.0.0", - "unist-util-position": "^3.0.0", - "unist-util-visit": "^2.0.0" - }, - "dependencies": { - "unist-util-visit": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz", - "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==", - "dev": true, - "requires": { - "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0", - "unist-util-visit-parents": "^3.0.0" - } - } + "unist-builder": "^3.0.0", + "unist-util-generated": "^2.0.0", + "unist-util-position": "^4.0.0", + "unist-util-visit": "^4.0.0" } }, "mdast-util-to-markdown": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz", - "integrity": "sha512-XeV9sDE7ZlOQvs45C9UKMtfTcctcaj/pGwH8YLbMHoMOXNNCn2LsqVQOqrF1+/NU8lKDAqozme9SCXWyo9oAcQ==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.2.2.tgz", + "integrity": "sha512-G8/IwiB2clO8tJsw/fBNNilZ8wDKJnnOts0a3Ls6DVKiy+K7wHGfZDMxLrYXW3QuHiNOZiSU1KduL1oBY4MQqQ==", "dev": true, "requires": { + "@types/mdast": "^3.0.0", "@types/unist": "^2.0.0", - "longest-streak": "^2.0.0", - "mdast-util-to-string": "^2.0.0", - "parse-entities": "^2.0.0", - "repeat-string": "^1.0.0", - "zwitch": "^1.0.0" + "longest-streak": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "parse-entities": "^3.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" } }, "mdast-util-to-string": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", - "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.1.0.tgz", + "integrity": "sha512-n4Vypz/DZgwo0iMHLQL49dJzlp7YtAJP+N07MZHpjPf/5XJuHUWstviF4Mn2jEiR/GNmtnRRqnwsXExk3igfFA==", "dev": true }, "mdurl": { @@ -1835,80 +2203,305 @@ "dev": true }, "micromark": { - "version": "2.11.4", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", - "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.0.5.tgz", + "integrity": "sha512-QfjERBnPw0G9mxhOCkkbRP0n8SX8lIBLrEKeEVceviUukqVMv3hWE4AgNTOK/W6GWqtPvvIHg2Apl3j1Dxm6aQ==", "dev": true, "requires": { + "@types/debug": "^4.0.0", "debug": "^4.0.0", - "parse-entities": "^2.0.0" + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": 
"^1.0.1", + "parse-entities": "^3.0.0" + } + }, + "micromark-core-commonmark": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.0.1.tgz", + "integrity": "sha512-vEOw8hcQ3nwHkKKNIyP9wBi8M50zjNajtmI+cCUWcVfJS+v5/3WCh4PLKf7PPRZFUutjzl4ZjlHwBWUKfb/SkA==", + "dev": true, + "requires": { + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "parse-entities": "^3.0.0" } }, "micromark-extension-frontmatter": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-0.2.2.tgz", - "integrity": "sha512-q6nPLFCMTLtfsctAuS0Xh4vaolxSFUWUWR6PZSrXXiRy+SANGllpcqdXFv2z07l0Xz/6Hl40hK0ffNCJPH2n1A==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-1.0.0.tgz", + "integrity": "sha512-EXjmRnupoX6yYuUJSQhrQ9ggK0iQtQlpi6xeJzVD5xscyAI+giqco5fdymayZhJMbIFecjnE2yz85S9NzIgQpg==", "dev": true, "requires": { - "fault": "^1.0.0" + "fault": "^2.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0" } }, "micromark-extension-gfm": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.3.tgz", - "integrity": "sha512-oVN4zv5/tAIA+l3GbMi7lWeYpJ14oQyJ3uEim20ktYFAcfX1x3LNlFGGlmrZHt7u9YlKExmyJdDGaTt6cMSR/A==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-1.0.0.tgz", + "integrity": "sha512-OjqbQPL1Vec/4l5hnC8WnMNmWwgrT9JvzR2udqIGrGKecZsdwY9GAWZ5482CuD12SXuHNj8aS8epni6ip0Pwog==", "dev": true, "requires": { - "micromark": "~2.11.0", - "micromark-extension-gfm-autolink-literal": "~0.5.0", - "micromark-extension-gfm-strikethrough": "~0.6.5", - "micromark-extension-gfm-table": "~0.4.0", - "micromark-extension-gfm-tagfilter": "~0.3.0", - "micromark-extension-gfm-task-list-item": "~0.3.0" + "micromark-extension-gfm-autolink-literal": "^1.0.0", + "micromark-extension-gfm-strikethrough": "^1.0.0", + "micromark-extension-gfm-table": "^1.0.0", + "micromark-extension-gfm-tagfilter": "^1.0.0", + "micromark-extension-gfm-task-list-item": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, "micromark-extension-gfm-autolink-literal": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz", - "integrity": "sha512-ePiDGH0/lhcngCe8FtH4ARFoxKTUelMp4L7Gg2pujYD5CSMb9PbblnyL+AAMud/SNMyusbS2XDSiPIRcQoNFAw==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.0.tgz", + "integrity": "sha512-t+K0aPK32mXypVTEKV+WRfoT/Rb7MERDgHZVRr56NXpyQQhgMk72QnK4NljYUlrgbuesH+MxiPQwThzqRDIwvA==", "dev": true, "requires": { - "micromark": "~2.11.3" + "micromark-util-character": "^1.0.0", + 
"micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, "micromark-extension-gfm-strikethrough": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.5.tgz", - "integrity": "sha512-PpOKlgokpQRwUesRwWEp+fHjGGkZEejj83k9gU5iXCbDG+XBA92BqnRKYJdfqfkrRcZRgGuPuXb7DaK/DmxOhw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.1.tgz", + "integrity": "sha512-fzGYXWz9HPWH1uHqYwdyR8XpEtuoYVHUjTdPQTnl3ETVZOQe1NXMwE3RA7AMqeON52hG+kO9g1/P1+pLONBSMQ==", "dev": true, "requires": { - "micromark": "~2.11.0" + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, "micromark-extension-gfm-table": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.3.tgz", - "integrity": "sha512-hVGvESPq0fk6ALWtomcwmgLvH8ZSVpcPjzi0AjPclB9FsVRgMtGZkUcpE0zgjOCFAznKepF4z3hX8z6e3HODdA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.0.tgz", + "integrity": "sha512-OATRuHDgEAT/aaJJRSdU12V+s01kNSnJ0jumdfLq5mPy0F5DkR3zbTSFLH4tjVYM0/kEG6umxIhHY62mFe4z5Q==", "dev": true, "requires": { - "micromark": "~2.11.0" + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, "micromark-extension-gfm-tagfilter": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", - "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==", - "dev": true + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.0.tgz", + "integrity": "sha512-GGUZhzQrOdHR8RHU2ru6K+4LMlj+pBdNuXRtw5prOflDOk2hHqDB0xEgej1AHJ2VETeycX7tzQh2EmaTUOmSKg==", + "dev": true, + "requires": { + "micromark-util-types": "^1.0.0" + } }, "micromark-extension-gfm-task-list-item": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.3.tgz", - "integrity": "sha512-0zvM5iSLKrc/NQl84pZSjGo66aTGd57C1idmlWmE87lkMcXrTxg1uXa/nXomxJytoje9trP0NDLvw4bZ/Z/XCQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.0.tgz", + "integrity": "sha512-3tkHCq1NNwijtwpjYba9+rl1yvQ4xYg8iQpUAfTJRyq8MtIEsBUF/vW6B9Gh8Qwy1hE2FmpyHhP4jnFAt61zLg==", + "dev": true, + "requires": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-factory-destination": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz", + "integrity": "sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw==", + "dev": true, + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" 
+ } + }, + "micromark-factory-label": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.0.0.tgz", + "integrity": "sha512-XWEucVZb+qBCe2jmlOnWr6sWSY6NHx+wtpgYFsm4G+dufOf6tTQRRo0bdO7XSlGPu5fyjpJenth6Ksnc5Mwfww==", + "dev": true, + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-factory-space": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.0.0.tgz", + "integrity": "sha512-qUmqs4kj9a5yBnk3JMLyjtWYN6Mzfcx8uJfi5XAveBniDevmZasdGBba5b4QsvRcAkmvGo5ACmSUmyGiKTLZew==", + "dev": true, + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-factory-title": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.0.0.tgz", + "integrity": "sha512-flvC7Gx0dWVWorXuBl09Cr3wB5FTuYec8pMGVySIp2ZlqTcIjN/lFohZcP0EG//krTptm34kozHk7aK/CleCfA==", + "dev": true, + "requires": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-factory-whitespace": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.0.0.tgz", + "integrity": "sha512-Qx7uEyahU1lt1RnsECBiuEbfr9INjQTGa6Err+gF3g0Tx4YEviPbqqGKNv/NrBaE7dVHdn1bVZKM/n5I/Bak7A==", + "dev": true, + "requires": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.1.0.tgz", + "integrity": "sha512-agJ5B3unGNJ9rJvADMJ5ZiYjBRyDpzKAOk01Kpi1TKhlT1APx3XZk6eN7RtSz1erbWHC2L8T3xLZ81wdtGRZzg==", + "dev": true, + "requires": { + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-chunked": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.0.0.tgz", + "integrity": "sha512-5e8xTis5tEZKgesfbQMKRCyzvffRRUX+lK/y+DvsMFdabAicPkkZV6gO+FEWi9RfuKKoxxPwNL+dFF0SMImc1g==", + "dev": true, + "requires": { + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-classify-character": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.0.0.tgz", + "integrity": "sha512-F8oW2KKrQRb3vS5ud5HIqBVkCqQi224Nm55o5wYLzY/9PwHGXC01tr3d7+TqHHz6zrKQ72Okwtvm/xQm6OVNZA==", + "dev": true, + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-combine-extensions": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.0.0.tgz", + "integrity": "sha512-J8H058vFBdo/6+AsjHp2NF7AJ02SZtWaVUjsayNFeAiydTxUwViQPxN0Hf8dp4FmCQi0UUFovFsEyRSUmFH3MA==", + "dev": true, + "requires": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-decode-numeric-character-reference": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.0.0.tgz", + "integrity": "sha512-OzO9AI5VUtrTD7KSdagf4MWgHMtET17Ua1fIpXTpuhclCqD8egFWo85GxSGvxgkGS74bEahvtM0WP0HjvV0e4w==", + "dev": true, + "requires": { + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-encode": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.0.0.tgz", + "integrity": "sha512-cJpFVM768h6zkd8qJ1LNRrITfY4gwFt+tziPcIf71Ui8yFzY9wG3snZQqiWVq93PG4Sw6YOtcNiKJfVIs9qfGg==", + "dev": true + }, + "micromark-util-html-tag-name": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.0.0.tgz", + "integrity": "sha512-NenEKIshW2ZI/ERv9HtFNsrn3llSPZtY337LID/24WeLqMzeZhBEE6BQ0vS2ZBjshm5n40chKtJ3qjAbVV8S0g==", + "dev": true + }, + "micromark-util-normalize-identifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.0.0.tgz", + "integrity": "sha512-yg+zrL14bBTFrQ7n35CmByWUTFsgst5JhA4gJYoty4Dqzj4Z4Fr/DHekSS5aLfH9bdlfnSvKAWsAgJhIbogyBg==", + "dev": true, + "requires": { + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-resolve-all": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.0.0.tgz", + "integrity": "sha512-CB/AGk98u50k42kvgaMM94wzBqozSzDDaonKU7P7jwQIuH2RU0TeBqGYJz2WY1UdihhjweivStrJ2JdkdEmcfw==", + "dev": true, + "requires": { + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-sanitize-uri": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.0.0.tgz", + "integrity": "sha512-cCxvBKlmac4rxCGx6ejlIviRaMKZc0fWm5HdCHEeDWRSkn44l6NdYVRyU+0nT1XC72EQJMZV8IPHF+jTr56lAg==", + "dev": true, + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-subtokenize": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.0.0.tgz", + "integrity": "sha512-EsnG2qscmcN5XhkqQBZni/4oQbLFjz9yk3ZM/P8a3YUjwV6+6On2wehr1ALx0MxK3+XXXLTzuBKHDFeDFYRdgQ==", "dev": true, "requires": { - "micromark": "~2.11.0" + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, + "micromark-util-symbol": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.0.0.tgz", + "integrity": "sha512-NZA01jHRNCt4KlOROn8/bGi6vvpEmlXld7EHcRH+aYWUfL3Wc8JLUNNlqUMKa0hhz6GrpUWsHtzPmKof57v0gQ==", + "dev": true + }, + "micromark-util-types": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.0.1.tgz", + "integrity": "sha512-UT0ylWEEy80RFYzK9pEaugTqaxoD/j0Y9WhHpSyitxd99zjoQz7JJ+iKuhPAgOW2MiPSUAx+c09dcqokeyaROA==", + "dev": true + }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -1916,26 +2509,26 @@ "dev": true }, "nth-check": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.0.tgz", - "integrity": "sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q==", + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", + "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", "dev": true, "requires": { "boolbase": "^1.0.0" } }, "parse-entities": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz", - "integrity": "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-3.0.0.tgz", + "integrity": "sha512-AJlcIFDNPEP33KyJLguv0xJc83BNvjxwpuUIcetyXUsLpVXAUCePJ5kIoYtEN2R1ac0cYaRu/vk9dVFkewHQhQ==", "dev": true, "requires": { - "character-entities": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "character-reference-invalid": "^1.0.0", - "is-alphanumerical": "^1.0.0", - "is-decimal": "^1.0.0", - "is-hexadecimal": "^1.0.0" + "character-entities": "^2.0.0", + "character-entities-legacy": "^2.0.0", + "character-reference-invalid": "^2.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" } }, "parse5": { @@ -1945,102 +2538,107 @@ "dev": true }, "property-information": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", - "integrity": "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==", - "dev": true, - "requires": { - "xtend": "^4.0.0" - } + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.0.1.tgz", + "integrity": "sha512-F4WUUAF7fMeF4/JUFHNBWDaKDXi2jbvqBW/y6o5wsf3j19wTZ7S60TmtB5HoBhtgw7NKQRMWuz5vk2PR0CygUg==", + "dev": true }, "rehype-raw": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-5.1.0.tgz", - "integrity": "sha512-MDvHAb/5mUnif2R+0IPCYJU8WjHa9UzGtM/F4AVy5GixPlDZ1z3HacYy4xojDU+uBa+0X/3PIfyQI26/2ljJNA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/rehype-raw/-/rehype-raw-6.1.0.tgz", + "integrity": "sha512-12j2UiiYJgZFdjnHDny77NY5BF3eW4Jsl0vtgL1DWdTzcHjPpbhumU+GtPUdivEWwQc8x9OdEuO0oxaGz7Tvyg==", "dev": true, "requires": { - "hast-util-raw": "^6.1.0" + "@types/hast": "^2.0.0", + "hast-util-raw": "^7.2.0", + "unified": "^10.0.0" } }, "rehype-stringify": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-8.0.0.tgz", - "integrity": "sha512-VkIs18G0pj2xklyllrPSvdShAV36Ff3yE5PUO9u36f6+2qJFnn22Z5gKwBOwgXviux4UC7K+/j13AnZfPICi/g==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-9.0.2.tgz", + "integrity": "sha512-BuVA6lAEYtOpXO2xuHLohAzz8UNoQAxAqYRqh4QEEtU39Co+P1JBZhw6wXA9hMWp+JLcmrxWH8+UKcNSr443Fw==", "dev": true, "requires": { - "hast-util-to-html": "^7.1.1" + "@types/hast": "^2.0.0", + "hast-util-to-html": "^8.0.0", + "unified": "^10.0.0" } }, "remark-frontmatter": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-3.0.0.tgz", - "integrity": "sha512-mSuDd3svCHs+2PyO29h7iijIZx4plX0fheacJcAoYAASfgzgVIcXGYSq9GFyYocFLftQs8IOmmkgtOovs6d4oA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-4.0.0.tgz", + "integrity": "sha512-0J+2czWAS9sz9baJJel4tTUnNhMI7wYgih99Hxhdeq2GpdI1Ctu0iol6zAsWw5xa+jLsZXNiwEnnJAJo3XX3hw==", "dev": true, "requires": { - "mdast-util-frontmatter": "^0.2.0", - "micromark-extension-frontmatter": "^0.2.0" 
+ "@types/mdast": "^3.0.0", + "mdast-util-frontmatter": "^1.0.0", + "micromark-extension-frontmatter": "^1.0.0", + "unified": "^10.0.0" } }, "remark-gfm": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", - "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-2.0.0.tgz", + "integrity": "sha512-waIv4Tjcd2CTUDxKRYzuPyIHw1FoX4H2GjXAzXV9PxQWb+dU4fJivd/FZ+nxyzPARrqTjMIkwIwPoWNbpBhjcQ==", "dev": true, "requires": { - "mdast-util-gfm": "^0.1.0", - "micromark-extension-gfm": "^0.3.0" + "@types/mdast": "^3.0.0", + "mdast-util-gfm": "^1.0.0", + "micromark-extension-gfm": "^1.0.0", + "unified": "^10.0.0" } }, "remark-html": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-13.0.2.tgz", - "integrity": "sha512-LhSRQ+3RKdBqB/RGesFWkNNfkGqprDUCwjq54SylfFeNyZby5kqOG8Dn/vYsRoM8htab6EWxFXCY6XIZvMoRiQ==", + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/remark-html/-/remark-html-14.0.1.tgz", + "integrity": "sha512-a/x5bTlFrkwYkz43zuJIk0m0IuS5Rx8zLztGwdzmAdUj0Hsi4C4nkJ8gTQRNXY/ET/gMrqQORMMI0arRItq/aQ==", "dev": true, "requires": { - "hast-util-sanitize": "^3.0.0", - "hast-util-to-html": "^7.0.0", - "mdast-util-to-hast": "^10.0.0" + "@types/mdast": "^3.0.0", + "hast-util-sanitize": "^4.0.0", + "hast-util-to-html": "^8.0.0", + "mdast-util-to-hast": "^11.0.0", + "unified": "^10.0.0" } }, "remark-parse": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-9.0.0.tgz", - "integrity": "sha512-geKatMwSzEXKHuzBNU1z676sGcDcFoChMK38TgdHJNAYfFtsfHDQG7MoJAjs6sgYMqyLduCYWDIWZIxiPeafEw==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.0.tgz", + "integrity": "sha512-07ei47p2Xl7Bqbn9H2VYQYirnAFJPwdMuypdozWsSbnmrkgA2e2sZLZdnDNrrsxR4onmIzH/J6KXqKxCuqHtPQ==", "dev": true, "requires": { - "mdast-util-from-markdown": "^0.8.0" + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "unified": "^10.0.0" } }, "remark-rehype": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-8.1.0.tgz", - "integrity": "sha512-EbCu9kHgAxKmW1yEYjx3QafMyGY3q8noUbNUI5xyKbaFP89wbhDrKxyIQNukNYthzjNHZu6J7hwFg7hRm1svYA==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-9.0.0.tgz", + "integrity": "sha512-SFA+mPWu45ynFPKeT3h5eNNVAYoMp3wizr3KSKh1IQ9L6dLSyD25/df6/vv8EW8ji3O3dnZGdbLQl592Tn+ydg==", "dev": true, "requires": { - "mdast-util-to-hast": "^10.2.0" + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-to-hast": "^11.0.0", + "unified": "^10.0.0" } }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "dev": true - }, "space-separated-tokens": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz", - "integrity": "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.1.tgz", + "integrity": "sha512-ekwEbFp5aqSPKaqeY1PGrlGQxPNaq+Cnx4+bE2D8sciBQrHpbwoBbawqTN2+6jPs9IdWxxiUcN0K2pkczD3zmw==", "dev": true }, "stringify-entities": { - "version": 
"3.1.0", - "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-3.1.0.tgz", - "integrity": "sha512-3FP+jGMmMV/ffZs86MoghGqAoqXAdxLrJP4GUdrDN1aIScYih5tuIO3eF4To5AJZ79KDZ8Fpdy7QJnK8SsL1Vg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.1.tgz", + "integrity": "sha512-gmMQxKXPWIO3NXNSPyWNhlYcBNGpPA/487D+9dLPnU4xBnIrnHdr8cv5rGJOS/1BRxEXRb7uKwg7BA36IWV7xg==", "dev": true, "requires": { - "character-entities-html4": "^1.0.0", - "character-entities-legacy": "^1.0.0", - "xtend": "^4.0.0" + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^2.0.0" } }, "style-to-object": { @@ -2053,90 +2651,61 @@ } }, "to-vfile": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/to-vfile/-/to-vfile-7.1.0.tgz", - "integrity": "sha512-t1c42ASuWo39ddjh2R+hX5+kbDcc2CmbhaTSJkVavDYMjnFkpq0L4LeF+rcPoDdieGUsFaivSSkmwuFpYzhBZw==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/to-vfile/-/to-vfile-7.2.2.tgz", + "integrity": "sha512-7WL+coet3qyaYb5vrVrfLtOUHgNv9E1D5SIsyVKmHKcgZefy77WMQRk7FByqGKNInoHOlY6xkTGymo29AwjUKg==", "dev": true, "requires": { "is-buffer": "^2.0.0", - "vfile": "^5.0.0" - }, - "dependencies": { - "unist-util-stringify-position": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", - "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", - "dev": true, - "requires": { - "@types/unist": "^2.0.0" - } - }, - "vfile": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.0.1.tgz", - "integrity": "sha512-lbcf0k66x96Syy36HG+nIBFaSD/fAk589q4nETZTr0JW7eRRmrVo1vHwbD8NlHszUM5ICtFSWQ5xHC292hYZ/w==", - "dev": true, - "requires": { - "@types/unist": "^2.0.0", - "is-buffer": "^2.0.0", - "unist-util-stringify-position": "^3.0.0", - "vfile-message": "^3.0.0" - } - }, - "vfile-message": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.0.1.tgz", - "integrity": "sha512-gYmSHcZZUEtYpTmaWaFJwsuUD70/rTY4v09COp8TGtOkix6gGxb/a8iTQByIY9ciTk9GwAwIXd/J9OPfM4Bvaw==", - "dev": true, - "requires": { - "@types/unist": "^2.0.0", - "unist-util-stringify-position": "^3.0.0" - } - } + "vfile": "^5.1.0" } }, "trough": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz", - "integrity": "sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.0.2.tgz", + "integrity": "sha512-FnHq5sTMxC0sk957wHDzRnemFnNBvt/gSY99HzK8F7UP5WAbvP70yX5bd7CjEQkN+TjdxwI7g7lJ6podqrG2/w==", "dev": true }, "unified": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/unified/-/unified-9.2.1.tgz", - "integrity": "sha512-juWjuI8Z4xFg8pJbnEZ41b5xjGUWGHqXALmBZ3FC3WX0PIx1CZBIIJ6mXbYMcf6Yw4Fi0rFUTA1cdz/BglbOhA==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.0.tgz", + "integrity": "sha512-4U3ru/BRXYYhKbwXV6lU6bufLikoAavTwev89H5UxY8enDFaAT2VXmIXYNm6hb5oHPng/EXr77PVyDFcptbk5g==", "dev": true, "requires": { - "bail": "^1.0.0", + "@types/unist": "^2.0.0", + "bail": "^2.0.0", "extend": "^3.0.0", "is-buffer": "^2.0.0", - "is-plain-obj": "^2.0.0", - "trough": "^1.0.0", - "vfile": "^4.0.0" + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^5.0.0" } }, "unist-builder": { - "version": "2.0.3", - 
"resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz", - "integrity": "sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw==", - "dev": true + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-builder/-/unist-builder-3.0.0.tgz", + "integrity": "sha512-GFxmfEAa0vi9i5sd0R2kcrI9ks0r82NasRq5QHh2ysGngrc6GiqD5CDf1FjPenY4vApmFASBIIlk/jj5J5YbmQ==", + "dev": true, + "requires": { + "@types/unist": "^2.0.0" + } }, "unist-util-generated": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz", - "integrity": "sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.0.tgz", + "integrity": "sha512-TiWE6DVtVe7Ye2QxOVW9kqybs6cZexNwTwSMVgkfjEReqy/xwGpAXb99OxktoWwmL+Z+Epb0Dn8/GNDYP1wnUw==", "dev": true }, "unist-util-is": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz", - "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.1.1.tgz", + "integrity": "sha512-F5CZ68eYzuSvJjGhCLPL3cYx45IxkqXSetCcRgUXtbcm50X2L9oOWQlfUfDdAf+6Pd27YDblBfdtmsThXmwpbQ==", "dev": true }, "unist-util-position": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz", - "integrity": "sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.1.tgz", + "integrity": "sha512-mgy/zI9fQ2HlbOtTdr2w9lhVaiFUHWQnZrFF2EUoVOqtAUdzqMtNiD99qA5a1IcjWVR8O6aVYE9u7Z2z1v0SQA==", "dev": true }, "unist-util-select": { @@ -2149,52 +2718,32 @@ "nth-check": "^2.0.0", "unist-util-is": "^5.0.0", "zwitch": "^2.0.0" - }, - "dependencies": { - "unist-util-is": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.1.0.tgz", - "integrity": "sha512-pWspZ+AvTqYbC+xWeRmzGqbcY8Na08Eowlfs2xchWTYot8vBBAq+syrE/LWS0bw1D/JOu4lwzDbEb6Mz13tK+g==", - "dev": true - }, - "zwitch": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.2.tgz", - "integrity": "sha512-JZxotl7SxAJH0j7dN4pxsTV6ZLXoLdGME+PsjkL/DaBrVryK9kTGq06GfKrwcSOqypP+fdXGoCHE36b99fWVoA==", - "dev": true - } } }, "unist-util-stringify-position": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz", - "integrity": "sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.0.tgz", + "integrity": "sha512-SdfAl8fsDclywZpfMDTVDxA2V7LjtRDTOFd44wUJamgl6OlVngsqWjxvermMYf60elWHbxhuRCZml7AnuXCaSA==", "dev": true, "requires": { - "@types/unist": "^2.0.2" + "@types/unist": "^2.0.0" } }, "unist-util-visit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-3.1.0.tgz", - "integrity": "sha512-Szoh+R/Ll68QWAyQyZZpQzZQm2UPbxibDvaY8Xc9SUtYgPsDzx5AWSk++UUt2hJuow8mvwR+rG+LQLw+KsuAKA==", + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.0.0.tgz", + "integrity": "sha512-3HWTvrtU10/E7qgPznBfiOyG0TXj9W8c1GSfaI8L9GkaG1pLePiQPZ7E35a0R3ToQ/zcy4Im6aZ9WBgOTnv1MQ==", "dev": true, "requires": { "@types/unist": "^2.0.0", "unist-util-is": "^5.0.0", - "unist-util-visit-parents": "^4.0.0" + "unist-util-visit-parents": "^5.0.0" }, "dependencies": { - "unist-util-is": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.1.0.tgz", - "integrity": "sha512-pWspZ+AvTqYbC+xWeRmzGqbcY8Na08Eowlfs2xchWTYot8vBBAq+syrE/LWS0bw1D/JOu4lwzDbEb6Mz13tK+g==", - "dev": true - }, "unist-util-visit-parents": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-4.1.1.tgz", - "integrity": "sha512-1xAFJXAKpnnJl8G7K5KgU7FY55y3GcLIXqkzUj5QF/QVP7biUm0K0O2oqVkYsdjzJKifYeWn9+o6piAK2hGSHw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.0.0.tgz", + "integrity": "sha512-CVaLOYPM/EaFTYMytbaju3Tw4QI3DHnHFnL358FkEu0hZOzSm/hqBdVwOQDR60jF5ZzhB1tlZlRH0ll/yekZIQ==", "dev": true, "requires": { "@types/unist": "^2.0.0", @@ -2204,59 +2753,57 @@ } }, "unist-util-visit-parents": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz", - "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-4.1.1.tgz", + "integrity": "sha512-1xAFJXAKpnnJl8G7K5KgU7FY55y3GcLIXqkzUj5QF/QVP7biUm0K0O2oqVkYsdjzJKifYeWn9+o6piAK2hGSHw==", "dev": true, "requires": { "@types/unist": "^2.0.0", - "unist-util-is": "^4.0.0" + "unist-util-is": "^5.0.0" } }, "vfile": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.2.1.tgz", - "integrity": "sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.1.0.tgz", + "integrity": "sha512-4o7/DJjEaFPYSh0ckv5kcYkJTHQgCKdL8ozMM1jLAxO9ox95IzveDPXCZp08HamdWq8JXTkClDvfAKaeLQeKtg==", "dev": true, "requires": { "@types/unist": "^2.0.0", "is-buffer": "^2.0.0", - "unist-util-stringify-position": "^2.0.0", - "vfile-message": "^2.0.0" + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" } }, "vfile-location": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz", - "integrity": "sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA==", - "dev": true + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.0.1.tgz", + "integrity": "sha512-JDxPlTbZrZCQXogGheBHjbRWjESSPEak770XwWPfw5mTc1v1nWGLB/apzZxsx8a0SJVfF8HK8ql8RD308vXRUw==", + "dev": true, + "requires": { + "@types/unist": "^2.0.0", + "vfile": "^5.0.0" + } }, "vfile-message": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.4.tgz", - "integrity": "sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.0.2.tgz", + "integrity": "sha512-UUjZYIOg9lDRwwiBAuezLIsu9KlXntdxwG+nXnjuQAHvBpcX3x0eN8h+I7TkY5nkCXj+cWVp4ZqebtGBvok8ww==", "dev": true, "requires": { 
"@types/unist": "^2.0.0", - "unist-util-stringify-position": "^2.0.0" + "unist-util-stringify-position": "^3.0.0" } }, "web-namespaces": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz", - "integrity": "sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw==", - "dev": true - }, - "xtend": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.0.tgz", + "integrity": "sha512-dE7ELZRVWh0ceQsRgkjLgsAvwTuv3kcjSY/hLjqL0llleUlQBDjE9JkB9FCBY5F2mnFEwiyJoowl8+NVGHe8dw==", "dev": true }, "zwitch": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz", - "integrity": "sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.2.tgz", + "integrity": "sha512-JZxotl7SxAJH0j7dN4pxsTV6ZLXoLdGME+PsjkL/DaBrVryK9kTGq06GfKrwcSOqypP+fdXGoCHE36b99fWVoA==", "dev": true } } diff --git a/tools/doc/package.json b/tools/doc/package.json index b349269bd7ed1e..444576a55f1288 100644 --- a/tools/doc/package.json +++ b/tools/doc/package.json @@ -7,19 +7,19 @@ "node": ">=14.8.0" }, "devDependencies": { - "highlight.js": "11.0.1", - "js-yaml": "4.1.0", - "rehype-raw": "5.1.0", - "rehype-stringify": "8.0.0", - "remark-frontmatter": "^3.0.0", - "remark-gfm": "^1.0.0", - "remark-html": "13.0.2", - "remark-parse": "^9.0.0", - "remark-rehype": "8.1.0", - "to-vfile": "7.1.0", - "unified": "9.2.1", - "unist-util-select": "4.0.0", - "unist-util-visit": "3.1.0" + "highlight.js": "^11.2.0", + "js-yaml": "^4.1.0", + "rehype-raw": "^6.1.0", + "rehype-stringify": "^9.0.2", + "remark-frontmatter": "^4.0.0", + "remark-gfm": "^2.0.0", + "remark-html": "^14.0.1", + "remark-parse": "^10.0.0", + "remark-rehype": "^9.0.0", + "to-vfile": "^7.2.2", + "unified": "^10.1.0", + "unist-util-select": "^4.0.0", + "unist-util-visit": "^4.0.0" }, "bin": { "node-doc-generator": "generate.js" diff --git a/tools/doc/stability.mjs b/tools/doc/stability.mjs index 6b5f182a76e7e0..68d77cc4593ffa 100644 --- a/tools/doc/stability.mjs +++ b/tools/doc/stability.mjs @@ -7,7 +7,7 @@ import htmlStringify from 'rehype-stringify'; import gfm from 'remark-gfm'; import markdown from 'remark-parse'; import remark2rehype from 'remark-rehype'; -import unified from 'unified'; +import { unified } from 'unified'; import { visit } from 'unist-util-visit'; const source = new URL('../../out/doc/api/', import.meta.url); @@ -63,7 +63,7 @@ function createHTML(md) { .use(processStability) .processSync(md); - return file.contents.trim(); + return file.value.trim(); } function processStability() { From ec94bec9a3d00850fe0462a21395d06c9be2e794 Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 12 Sep 2021 13:36:06 +0200 Subject: [PATCH 83/95] test: do not run `test-corepack-yarn-install` with no internet MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40090 Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- test/{parallel => internet}/test-corepack-yarn-install.js | 1 + 1 file changed, 1 insertion(+) 
rename test/{parallel => internet}/test-corepack-yarn-install.js (98%) diff --git a/test/parallel/test-corepack-yarn-install.js b/test/internet/test-corepack-yarn-install.js similarity index 98% rename from test/parallel/test-corepack-yarn-install.js rename to test/internet/test-corepack-yarn-install.js index 7fe22387bcac64..476f6eea2077f0 100644 --- a/test/parallel/test-corepack-yarn-install.js +++ b/test/internet/test-corepack-yarn-install.js @@ -54,6 +54,7 @@ function handleExit(error, stdout, stderr) { const signalCode = error ? error.signal : null; if (code !== 0) { + process.stdout.write(stdout); process.stderr.write(stderr); } From 49933188621e6e54ab69377ae2b6d8b527399d39 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Thu, 16 Sep 2021 21:51:07 -0700 Subject: [PATCH 84/95] meta: update GeoffreyBooth email addresses in AUTHORS and .mailmap PR-URL: https://github.com/nodejs/node/pull/40132 Reviewed-By: Antoine du Hamel Reviewed-By: Geoffrey Booth --- .mailmap | 4 ++-- AUTHORS | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.mailmap b/.mailmap index 5cefacfcf2d4a8..9ee76141ef3af6 100644 --- a/.mailmap +++ b/.mailmap @@ -150,8 +150,8 @@ Gabriel de Perthuis Gareth Ellis Garwah Lam garygsc -Geoffrey Booth -Geoffrey Booth +Geoffrey Booth +Geoffrey Booth Geoffrey Bugaisky Gerhard Stöbich Gibson Fahnestock diff --git a/AUTHORS b/AUTHORS index 5ff0195965b9d5..602727b0a389e3 100644 --- a/AUTHORS +++ b/AUTHORS @@ -2484,7 +2484,7 @@ Loic Denis McDonald Arvind Pandey Jagannath Bhat -Geoffrey Booth +Geoffrey Booth mritunjaygoutam12 Esteban Sotillo Jerome Covington @@ -2827,7 +2827,6 @@ Aditya Denis Zavershinskiy Levhita claudiahdz -Geoffrey Booth Javier Ledezma Marian Rusnak <4215517+marian-r@users.noreply.github.com> Jenia From a07d8444f906689cdcf2b064966fd76964c1f8ba Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Thu, 16 Sep 2021 21:56:24 -0700 Subject: [PATCH 85/95] doc: fix comma splice MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40133 Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: Antoine du Hamel Reviewed-By: Michael Dawson --- doc/api/http.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/http.md b/doc/api/http.md index d0464a0b56d3c6..0cbfce0fbe67d9 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -2492,7 +2492,7 @@ added: v0.9.12 * `msesc` {number} * `callback` {Function} Optional function to be called when a timeout -occurs, Same as binding to the `timeout` event. + occurs. Same as binding to the `timeout` event. * Returns: {this} Once a socket is associated with the message and is connected, From 590ace418d7b274b2f8be0482d00c563f2687df2 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 18 Sep 2021 12:03:19 -0700 Subject: [PATCH 86/95] tools,doc: fix misrendering of consecutive JS blocks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Our markdown-to-html tool was assuming that any consecutive JS blocks were ESM vs CJS alternatives, but that is not always the case, resulting in both a confusing interface and invalid HTML. 
PR-URL: https://github.com/nodejs/node/pull/40146 Reviewed-By: Michaël Zasso Reviewed-By: Tobias Nießen Reviewed-By: Antoine du Hamel --- tools/doc/html.mjs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/doc/html.mjs b/tools/doc/html.mjs index 4efe569d11f153..9eb8ef8f9b2f7e 100644 --- a/tools/doc/html.mjs +++ b/tools/doc/html.mjs @@ -227,7 +227,8 @@ export function preprocessElements({ filename }) { nextNode.lang !== node.lang) { // Saving the highlight code as value to be added in the next node. node.value = highlighted; - } else if (isJSFlavorSnippet(previousNode)) { + } else if (isJSFlavorSnippet(previousNode) && + previousNode.lang !== node.lang) { node.value = '
      ' +
                     '
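A self-contained sketch of the combining rule this patch tightens: consecutive code blocks are only merged as CJS/ESM alternatives when the neighbouring block is JS-flavored *and* its `lang` tag differs. The `isJSFlavorSnippet` body below is an assumption inferred from the diff context, not the actual `tools/doc/html.mjs` source (whose HTML string literals are garbled above).

```js
// Assumption: the doc tooling tags JS flavors as 'cjs' or 'mjs'.
function isJSFlavorSnippet(node) {
  return Boolean(node) && (node.lang === 'cjs' || node.lang === 'mjs');
}

// The condition from the patched `else if` branch: merge with the previous
// snippet only when it is a JS flavor *and* the flavors actually differ.
function shouldCombineWithPrevious(previousNode, node) {
  return isJSFlavorSnippet(previousNode) &&
         previousNode.lang !== node.lang;
}

console.log(shouldCombineWithPrevious({ lang: 'cjs' }, { lang: 'cjs' })); // false
console.log(shouldCombineWithPrevious({ lang: 'cjs' }, { lang: 'mjs' })); // true
```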
      Date: Sat, 11 Sep 2021 14:55:09 +0300
      Subject: [PATCH 87/95] http: limit requests per connection
      
      Fixes: https://github.com/nodejs/node/issues/40071
      PR-URL: https://github.com/nodejs/node/pull/40082
      Reviewed-By: Matteo Collina 
      Reviewed-By: Robert Nagy 
      ---
       doc/api/http.md                               |  16 +++
       lib/_http_outgoing.js                         |   5 +-
       lib/_http_server.js                           |  51 +++++---
       .../test-http-keep-alive-max-requests.js      | 116 ++++++++++++++++++
       ...t-http-keep-alive-pipeline-max-requests.js |  86 +++++++++++++
       5 files changed, 258 insertions(+), 16 deletions(-)
       create mode 100644 test/parallel/test-http-keep-alive-max-requests.js
       create mode 100644 test/parallel/test-http-keep-alive-pipeline-max-requests.js
      
      diff --git a/doc/api/http.md b/doc/api/http.md
      index 0cbfce0fbe67d9..f41d9f74e27718 100644
      --- a/doc/api/http.md
      +++ b/doc/api/http.md
      @@ -1348,6 +1348,22 @@ By default, the Server does not timeout sockets. However, if a callback
       is assigned to the Server's `'timeout'` event, timeouts must be handled
       explicitly.
       
      +### `server.maxRequestsPerSocket`
      +
      +
+* {number} Requests per socket. **Default:** `null` (no limit)
+
+The maximum number of requests a socket can handle
+before closing the keep-alive connection.
      +
      +A value of `null` will disable the limit.
      +
+When the limit is reached the server sets the `Connection` header value to
+`close`, but does not actually close the connection; subsequent requests
+sent after the limit is reached receive `503 Service Unavailable` as a response.
      +
       ### `server.timeout`
       
      +
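A minimal usage sketch for the option documented in the hunk above; the port number and request handler are placeholders, not part of the patch.

```js
const http = require('http');

const server = http.createServer((req, res) => {
  res.end('ok');
});

// Allow at most two requests per keep-alive socket. The second response
// carries `Connection: close`; any request sent after that on the same
// socket is answered with `503 Service Unavailable`.
server.maxRequestsPerSocket = 2;

server.listen(3000); // hypothetical port
```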
       

      . #### Current and LTS releases + The [latest](https://nodejs.org/download/release/latest/) directory is an @@ -79,6 +81,7 @@ alias for the latest release from an LTS line. For example, the contains the latest Fermium (Node.js 14) release. #### Nightly releases + Each directory name and filename contains a date (in UTC) and the commit @@ -154,51 +157,53 @@ For information about the governance of the Node.js project, see + ### TSC (Technical Steering Committee) + * [aduh95](https://github.com/aduh95) - -**Antoine du Hamel** <duhamelantoine1995@gmail.com> (he/him) + **Antoine du Hamel** \ (he/him) * [apapirovski](https://github.com/apapirovski) - -**Anatoli Papirovski** <apapirovski@mac.com> (he/him) + **Anatoli Papirovski** \ (he/him) * [BethGriggs](https://github.com/BethGriggs) - -**Beth Griggs** <bgriggs@redhat.com> (she/her) + **Beth Griggs** \ (she/her) * [BridgeAR](https://github.com/BridgeAR) - -**Ruben Bridgewater** <ruben@bridgewater.de> (he/him) + **Ruben Bridgewater** \ (he/him) * [ChALkeR](https://github.com/ChALkeR) - -**Сковорода Никита Андреевич** <chalkerx@gmail.com> (he/him) + **Сковорода Никита Андреевич** \ (he/him) * [cjihrig](https://github.com/cjihrig) - -**Colin Ihrig** <cjihrig@gmail.com> (he/him) + **Colin Ihrig** \ (he/him) * [codebytere](https://github.com/codebytere) - -**Shelley Vohr** <shelley.vohr@gmail.com> (she/her) + **Shelley Vohr** \ (she/her) * [danielleadams](https://github.com/danielleadams) - -**Danielle Adams** <adamzdanielle@gmail.com> (she/her) + **Danielle Adams** \ (she/her) * [fhinkel](https://github.com/fhinkel) - -**Franziska Hinkelmann** <franziska.hinkelmann@gmail.com> (she/her) + **Franziska Hinkelmann** \ (she/her) * [gabrielschulhof](https://github.com/gabrielschulhof) - -**Gabriel Schulhof** <gabrielschulhof@gmail.com> + **Gabriel Schulhof** \ * [gireeshpunathil](https://github.com/gireeshpunathil) - -**Gireesh Punathil** <gpunathi@in.ibm.com> (he/him) + **Gireesh Punathil** \ (he/him) * [jasnell](https://github.com/jasnell) - -**James M Snell** <jasnell@gmail.com> (he/him) + **James M Snell** \ (he/him) * [joyeecheung](https://github.com/joyeecheung) - -**Joyee Cheung** <joyeec9h3@gmail.com> (she/her) + **Joyee Cheung** \ (she/her) * [mcollina](https://github.com/mcollina) - -**Matteo Collina** <matteo.collina@gmail.com> (he/him) + **Matteo Collina** \ (he/him) * [mhdawson](https://github.com/mhdawson) - -**Michael Dawson** <midawson@redhat.com> (he/him) + **Michael Dawson** \ (he/him) * [mmarchini](https://github.com/mmarchini) - -**Mary Marchini** <oss@mmarchini.me> (she/her) + **Mary Marchini** \ (she/her) * [MylesBorins](https://github.com/MylesBorins) - -**Myles Borins** <myles.borins@gmail.com> (he/him) + **Myles Borins** \ (he/him) * [ronag](https://github.com/ronag) - -**Robert Nagy** <ronagy@icloud.com> + **Robert Nagy** \ * [targos](https://github.com/targos) - -**Michaël Zasso** <targos@protonmail.com> (he/him) + **Michaël Zasso** \ (he/him) * [tniessen](https://github.com/tniessen) - -**Tobias Nießen** <tniessen@tnie.de> + **Tobias Nießen** \ * [Trott](https://github.com/Trott) - -**Rich Trott** <rtrott@gmail.com> (he/him) + **Rich Trott** \ (he/him)

      @@ -207,263 +212,264 @@ For information about the governance of the Node.js project, see ### TSC emeriti * [addaleax](https://github.com/addaleax) - -**Anna Henningsen** <anna@addaleax.net> (she/her) + **Anna Henningsen** \ (she/her) * [bnoordhuis](https://github.com/bnoordhuis) - -**Ben Noordhuis** <info@bnoordhuis.nl> + **Ben Noordhuis** \ * [chrisdickinson](https://github.com/chrisdickinson) - -**Chris Dickinson** <christopher.s.dickinson@gmail.com> + **Chris Dickinson** \ * [danbev](https://github.com/danbev) - -**Daniel Bevenius** <daniel.bevenius@gmail.com> (he/him) + **Daniel Bevenius** \ (he/him) * [evanlucas](https://github.com/evanlucas) - -**Evan Lucas** <evanlucas@me.com> (he/him) + **Evan Lucas** \ (he/him) * [Fishrock123](https://github.com/Fishrock123) - -**Jeremiah Senkpiel** <fishrock123@rocketmail.com> (he/they) + **Jeremiah Senkpiel** \ (he/they) * [gibfahn](https://github.com/gibfahn) - -**Gibson Fahnestock** <gibfahn@gmail.com> (he/him) + **Gibson Fahnestock** \ (he/him) * [indutny](https://github.com/indutny) - -**Fedor Indutny** <fedor@indutny.com> + **Fedor Indutny** \ * [isaacs](https://github.com/isaacs) - -**Isaac Z. Schlueter** <i@izs.me> + **Isaac Z. Schlueter** \ * [joshgav](https://github.com/joshgav) - -**Josh Gavant** <josh.gavant@outlook.com> + **Josh Gavant** \ * [mscdex](https://github.com/mscdex) - -**Brian White** <mscdex@mscdex.net> + **Brian White** \ * [nebrius](https://github.com/nebrius) - -**Bryan Hughes** <bryan@nebri.us> + **Bryan Hughes** \ * [ofrobots](https://github.com/ofrobots) - -**Ali Ijaz Sheikh** <ofrobots@google.com> (he/him) + **Ali Ijaz Sheikh** \ (he/him) * [orangemocha](https://github.com/orangemocha) - -**Alexis Campailla** <orangemocha@nodejs.org> + **Alexis Campailla** \ * [piscisaureus](https://github.com/piscisaureus) - -**Bert Belder** <bertbelder@gmail.com> + **Bert Belder** \ * [rvagg](https://github.com/rvagg) - -**Rod Vagg** <r@va.gg> + **Rod Vagg** \ * [sam-github](https://github.com/sam-github) - -**Sam Roberts** <vieuxtech@gmail.com> + **Sam Roberts** \ * [shigeki](https://github.com/shigeki) - -**Shigeki Ohtsu** <ohtsu@ohtsu.org> (he/him) + **Shigeki Ohtsu** \ (he/him) * [thefourtheye](https://github.com/thefourtheye) - -**Sakthipriyan Vairamani** <thechargingvolcano@gmail.com> (he/him) + **Sakthipriyan Vairamani** \ (he/him) * [TimothyGu](https://github.com/TimothyGu) - -**Tiancheng "Timothy" Gu** <timothygu99@gmail.com> (he/him) + **Tiancheng "Timothy" Gu** \ (he/him) * [trevnorris](https://github.com/trevnorris) - -**Trevor Norris** <trev.norris@gmail.com> + **Trevor Norris** \
      + ### Collaborators * [addaleax](https://github.com/addaleax) - -**Anna Henningsen** <anna@addaleax.net> (she/her) + **Anna Henningsen** \ (she/her) * [aduh95](https://github.com/aduh95) - -**Antoine du Hamel** <duhamelantoine1995@gmail.com> (he/him) + **Antoine du Hamel** \ (he/him) * [ak239](https://github.com/ak239) - -**Aleksei Koziatinskii** <ak239spb@gmail.com> + **Aleksei Koziatinskii** \ * [antsmartian](https://github.com/antsmartian) - -**Anto Aravinth** <anto.aravinth.cse@gmail.com> (he/him) + **Anto Aravinth** \ (he/him) * [apapirovski](https://github.com/apapirovski) - -**Anatoli Papirovski** <apapirovski@mac.com> (he/him) + **Anatoli Papirovski** \ (he/him) * [AshCripps](https://github.com/AshCripps) - -**Ash Cripps** <acripps@redhat.com> + **Ash Cripps** \ * [Ayase-252](https://github.com/Ayase-252) - -**Qingyu Deng** <i@ayase-lab.com> + **Qingyu Deng** \ * [bcoe](https://github.com/bcoe) - -**Ben Coe** <bencoe@gmail.com> (he/him) + **Ben Coe** \ (he/him) * [bengl](https://github.com/bengl) - -**Bryan English** <bryan@bryanenglish.com> (he/him) + **Bryan English** \ (he/him) * [benjamingr](https://github.com/benjamingr) - -**Benjamin Gruenbaum** <benjamingr@gmail.com> + **Benjamin Gruenbaum** \ * [BethGriggs](https://github.com/BethGriggs) - -**Beth Griggs** <bgriggs@redhat.com> (she/her) + **Beth Griggs** \ (she/her) * [bmeck](https://github.com/bmeck) - -**Bradley Farias** <bradley.meck@gmail.com> + **Bradley Farias** \ * [bmeurer](https://github.com/bmeurer) - -**Benedikt Meurer** <benedikt.meurer@gmail.com> + **Benedikt Meurer** \ * [boneskull](https://github.com/boneskull) - -**Christopher Hiller** <boneskull@boneskull.com> (he/him) + **Christopher Hiller** \ (he/him) * [BridgeAR](https://github.com/BridgeAR) - -**Ruben Bridgewater** <ruben@bridgewater.de> (he/him) + **Ruben Bridgewater** \ (he/him) * [bzoz](https://github.com/bzoz) - -**Bartosz Sosnowski** <bartosz@janeasystems.com> + **Bartosz Sosnowski** \ * [cclauss](https://github.com/cclauss) - -**Christian Clauss** <cclauss@me.com> (he/him) + **Christian Clauss** \ (he/him) * [ChALkeR](https://github.com/ChALkeR) - -**Сковорода Никита Андреевич** <chalkerx@gmail.com> (he/him) + **Сковорода Никита Андреевич** \ (he/him) * [cjihrig](https://github.com/cjihrig) - -**Colin Ihrig** <cjihrig@gmail.com> (he/him) + **Colin Ihrig** \ (he/him) * [codebytere](https://github.com/codebytere) - -**Shelley Vohr** <shelley.vohr@gmail.com> (she/her) + **Shelley Vohr** \ (she/her) * [danbev](https://github.com/danbev) - -**Daniel Bevenius** <daniel.bevenius@gmail.com> (he/him) + **Daniel Bevenius** \ (he/him) * [danielleadams](https://github.com/danielleadams) - -**Danielle Adams** <adamzdanielle@gmail.com> (she/her) + **Danielle Adams** \ (she/her) * [davisjam](https://github.com/davisjam) - -**Jamie Davis** <davisjam@vt.edu> (he/him) + **Jamie Davis** \ (he/him) * [DerekNonGeneric](https://github.com/DerekNonGeneric) - -**Derek Lewis** <DerekNonGeneric@inf.is> (he/him) + **Derek Lewis** \ (he/him) * [devnexen](https://github.com/devnexen) - -**David Carlier** <devnexen@gmail.com> + **David Carlier** \ * [devsnek](https://github.com/devsnek) - -**Gus Caplan** <me@gus.host> (they/them) + **Gus Caplan** \ (they/them) * [dmabupt](https://github.com/dmabupt) - -**Xu Meng** <dmabupt@gmail.com> (he/him) + **Xu Meng** \ (he/him) * [dnlup](https://github.com/dnlup) -**Daniele Belardi** <dwon.dnl@gmail.com> (he/him) + **Daniele Belardi** \ (he/him) * [edsadr](https://github.com/edsadr) - -**Adrian Estrada** <edsadr@gmail.com> 
(he/him) + **Adrian Estrada** \ (he/him) * [eugeneo](https://github.com/eugeneo) - -**Eugene Ostroukhov** <eostroukhov@google.com> + **Eugene Ostroukhov** \ * [evanlucas](https://github.com/evanlucas) - -**Evan Lucas** <evanlucas@me.com> (he/him) + **Evan Lucas** \ (he/him) * [fhinkel](https://github.com/fhinkel) - -**Franziska Hinkelmann** <franziska.hinkelmann@gmail.com> (she/her) + **Franziska Hinkelmann** \ (she/her) * [Fishrock123](https://github.com/Fishrock123) - -**Jeremiah Senkpiel** <fishrock123@rocketmail.com> (he/they) + **Jeremiah Senkpiel** \ (he/they) * [Flarna](https://github.com/Flarna) - -**Gerhard Stöbich** <deb2001-github@yahoo.de> (he/they) + **Gerhard Stöbich** \ (he/they) * [gabrielschulhof](https://github.com/gabrielschulhof) - -**Gabriel Schulhof** <gabrielschulhof@gmail.com> + **Gabriel Schulhof** \ * [geek](https://github.com/geek) - -**Wyatt Preul** <wpreul@gmail.com> + **Wyatt Preul** \ * [gengjiawen](https://github.com/gengjiawen) - -**Jiawen Geng** <technicalcute@gmail.com> + **Jiawen Geng** \ * [GeoffreyBooth](https://github.com/geoffreybooth) - -**Geoffrey Booth** <webadmin@geoffreybooth.com> (he/him) + **Geoffrey Booth** \ (he/him) * [gireeshpunathil](https://github.com/gireeshpunathil) - -**Gireesh Punathil** <gpunathi@in.ibm.com> (he/him) + **Gireesh Punathil** \ (he/him) * [guybedford](https://github.com/guybedford) - -**Guy Bedford** <guybedford@gmail.com> (he/him) + **Guy Bedford** \ (he/him) * [HarshithaKP](https://github.com/HarshithaKP) - -**Harshitha K P** <harshitha014@gmail.com> (she/her) + **Harshitha K P** \ (she/her) * [hashseed](https://github.com/hashseed) - -**Yang Guo** <yangguo@chromium.org> (he/him) + **Yang Guo** \ (he/him) * [himself65](https://github.com/himself65) - -**Zeyu Yang** <himself65@outlook.com> (he/him) + **Zeyu Yang** \ (he/him) * [hiroppy](https://github.com/hiroppy) - -**Yuta Hiroto** <hello@hiroppy.me> (he/him) + **Yuta Hiroto** \ (he/him) * [iansu](https://github.com/iansu) - -**Ian Sutherland** <ian@iansutherland.ca> + **Ian Sutherland** \ * [indutny](https://github.com/indutny) - -**Fedor Indutny** <fedor@indutny.com> + **Fedor Indutny** \ * [JacksonTian](https://github.com/JacksonTian) - -**Jackson Tian** <shyvo1987@gmail.com> + **Jackson Tian** \ * [jasnell](https://github.com/jasnell) - -**James M Snell** <jasnell@gmail.com> (he/him) + **James M Snell** \ (he/him) * [jkrems](https://github.com/jkrems) - -**Jan Krems** <jan.krems@gmail.com> (he/him) + **Jan Krems** \ (he/him) * [joaocgreis](https://github.com/joaocgreis) - -**João Reis** <reis@janeasystems.com> + **João Reis** \ * [joyeecheung](https://github.com/joyeecheung) - -**Joyee Cheung** <joyeec9h3@gmail.com> (she/her) + **Joyee Cheung** \ (she/her) * [juanarbol](https://github.com/juanarbol) - -**Juan José Arboleda** <soyjuanarbol@gmail.com> (he/him) + **Juan José Arboleda** \ (he/him) * [JungMinu](https://github.com/JungMinu) - -**Minwoo Jung** <nodecorelab@gmail.com> (he/him) + **Minwoo Jung** \ (he/him) * [legendecas](https://github.com/legendecas) - -**Chengzhong Wu** <legendecas@gmail.com> (he/him) + **Chengzhong Wu** \ (he/him) * [Leko](https://github.com/Leko) - -**Shingo Inoue** <leko.noor@gmail.com> (he/him) + **Shingo Inoue** \ (he/him) * [linkgoron](https://github.com/linkgoron) - -**Nitzan Uziely** <linkgoron@gmail.com> + **Nitzan Uziely** \ * [lpinca](https://github.com/lpinca) - -**Luigi Pinca** <luigipinca@gmail.com> (he/him) + **Luigi Pinca** \ (he/him) * [lundibundi](https://github.com/lundibundi) - -**Denys Otrishko** 
<shishugi@gmail.com> (he/him) + **Denys Otrishko** \ (he/him) * [Lxxyx](https://github.com/Lxxyx) - -**Zijian Liu** <lxxyxzj@gmail.com> (he/him) + **Zijian Liu** \ (he/him) * [mafintosh](https://github.com/mafintosh) - -**Mathias Buus** <mathiasbuus@gmail.com> (he/him) + **Mathias Buus** \ (he/him) * [mcollina](https://github.com/mcollina) - -**Matteo Collina** <matteo.collina@gmail.com> (he/him) + **Matteo Collina** \ (he/him) * [mhdawson](https://github.com/mhdawson) - -**Michael Dawson** <midawson@redhat.com> (he/him) + **Michael Dawson** \ (he/him) * [miladfarca](https://github.com/miladfarca) - -**Milad Fa** <mfarazma@redhat.com> (he/him) + **Milad Fa** \ (he/him) * [mildsunrise](https://github.com/mildsunrise) - -**Alba Mendez** <me@alba.sh> (she/her) + **Alba Mendez** \ (she/her) * [misterdjules](https://github.com/misterdjules) - -**Julien Gilli** <jgilli@netflix.com> + **Julien Gilli** \ * [mmarchini](https://github.com/mmarchini) - -**Mary Marchini** <oss@mmarchini.me> (she/her) + **Mary Marchini** \ (she/her) * [mscdex](https://github.com/mscdex) - -**Brian White** <mscdex@mscdex.net> + **Brian White** \ * [MylesBorins](https://github.com/MylesBorins) - -**Myles Borins** <myles.borins@gmail.com> (he/him) + **Myles Borins** \ (he/him) * [oyyd](https://github.com/oyyd) - -**Ouyang Yadong** <oyydoibh@gmail.com> (he/him) + **Ouyang Yadong** \ (he/him) * [panva](https://github.com/panva) - -**Filip Skokan** <panva.ip@gmail.com> + **Filip Skokan** \ * [PoojaDurgad](https://github.com/PoojaDurgad) - -**Pooja D P** <Pooja.D.P@ibm.com> (she/her) + **Pooja D P** \ (she/her) * [puzpuzpuz](https://github.com/puzpuzpuz) - -**Andrey Pechkurov** <apechkurov@gmail.com> (he/him) + **Andrey Pechkurov** \ (he/him) * [Qard](https://github.com/Qard) - -**Stephen Belanger** <admin@stephenbelanger.com> (he/him) + **Stephen Belanger** \ (he/him) * [RaisinTen](https://github.com/RaisinTen) - -**Darshan Sen** <raisinten@gmail.com> (he/him) + **Darshan Sen** \ (he/him) * [refack](https://github.com/refack) - -**Refael Ackermann (רפאל פלחי)** <refack@gmail.com> (he/him/הוא/אתה) + **Refael Ackermann (רפאל פלחי)** \ (he/him/הוא/אתה) * [rexagod](https://github.com/rexagod) - -**Pranshu Srivastava** <rexagod@gmail.com> (he/him) + **Pranshu Srivastava** \ (he/him) * [richardlau](https://github.com/richardlau) - -**Richard Lau** <rlau@redhat.com> + **Richard Lau** \ * [rickyes](https://github.com/rickyes) - -**Ricky Zhou** <0x19951125@gmail.com> (he/him) + **Ricky Zhou** \<0x19951125@gmail.com> (he/him) * [ronag](https://github.com/ronag) - -**Robert Nagy** <ronagy@icloud.com> + **Robert Nagy** \ * [ruyadorno](https://github.com/ruyadorno) - -**Ruy Adorno** <ruyadorno@github.com> (he/him) + **Ruy Adorno** \ (he/him) * [rvagg](https://github.com/rvagg) - -**Rod Vagg** <rod@vagg.org> + **Rod Vagg** \ * [ryzokuken](https://github.com/ryzokuken) - -**Ujjwal Sharma** <ryzokuken@disroot.org> (he/him) + **Ujjwal Sharma** \ (he/him) * [saghul](https://github.com/saghul) - -**Saúl Ibarra Corretgé** <s@saghul.net> + **Saúl Ibarra Corretgé** \ * [santigimeno](https://github.com/santigimeno) - -**Santiago Gimeno** <santiago.gimeno@gmail.com> + **Santiago Gimeno** \ * [seishun](https://github.com/seishun) - -**Nikolai Vavilov** <vvnicholas@gmail.com> + **Nikolai Vavilov** \ * [shisama](https://github.com/shisama) - -**Masashi Hirano** <shisama07@gmail.com> (he/him) + **Masashi Hirano** \ (he/him) * [silverwind](https://github.com/silverwind) - -**Roman Reiss** <me@silverwind.io> + **Roman Reiss** \ * 
[srl295](https://github.com/srl295) - -**Steven R Loomis** <srloomis@us.ibm.com> + **Steven R Loomis** \ * [starkwang](https://github.com/starkwang) - -**Weijia Wang** <starkwang@126.com> + **Weijia Wang** \ * [sxa](https://github.com/sxa) - -**Stewart X Addison** <sxa@redhat.com> (he/him) + **Stewart X Addison** \ (he/him) * [targos](https://github.com/targos) - -**Michaël Zasso** <targos@protonmail.com> (he/him) + **Michaël Zasso** \ (he/him) * [TimothyGu](https://github.com/TimothyGu) - -**Tiancheng "Timothy" Gu** <timothygu99@gmail.com> (he/him) + **Tiancheng "Timothy" Gu** \ (he/him) * [tniessen](https://github.com/tniessen) - -**Tobias Nießen** <tniessen@tnie.de> + **Tobias Nießen** \ * [trivikr](https://github.com/trivikr) - -**Trivikram Kamat** <trivikr.dev@gmail.com> + **Trivikram Kamat** \ * [Trott](https://github.com/Trott) - -**Rich Trott** <rtrott@gmail.com> (he/him) + **Rich Trott** \ (he/him) * [vdeturckheim](https://github.com/vdeturckheim) - -**Vladimir de Turckheim** <vlad2t@hotmail.com> (he/him) + **Vladimir de Turckheim** \ (he/him) * [watilde](https://github.com/watilde) - -**Daijiro Wachi** <daijiro.wachi@gmail.com> (he/him) + **Daijiro Wachi** \ (he/him) * [watson](https://github.com/watson) - -**Thomas Watson** <w@tson.dk> + **Thomas Watson** \ * [XadillaX](https://github.com/XadillaX) - -**Khaidi Chu** <i@2333.moe> (he/him) + **Khaidi Chu** \ (he/him) * [yashLadha](https://github.com/yashLadha) - -**Yash Ladha** <yash@yashladha.in> (he/him) + **Yash Ladha** \ (he/him) * [yhwang](https://github.com/yhwang) - -**Yihong Wang** <yh.wang@ibm.com> + **Yihong Wang** \ * [yorkie](https://github.com/yorkie) - -**Yorkie Liu** <yorkiefixer@gmail.com> + **Yorkie Liu** \ * [yosuke-furukawa](https://github.com/yosuke-furukawa) - -**Yosuke Furukawa** <yosuke.furukawa@gmail.com> + **Yosuke Furukawa** \ * [ZYSzys](https://github.com/ZYSzys) - -**Yongsheng Zhang** <zyszys98@gmail.com> (he/him) + **Yongsheng Zhang** \ (he/him)
      @@ -471,146 +477,147 @@ For information about the governance of the Node.js project, see + ### Collaborator emeriti * [andrasq](https://github.com/andrasq) - -**Andras** <andras@kinvey.com> + **Andras** \ * [AnnaMag](https://github.com/AnnaMag) - -**Anna M. Kedzierska** <anna.m.kedzierska@gmail.com> + **Anna M. Kedzierska** \ * [AndreasMadsen](https://github.com/AndreasMadsen) - -**Andreas Madsen** <amwebdk@gmail.com> (he/him) + **Andreas Madsen** \ (he/him) * [aqrln](https://github.com/aqrln) - -**Alexey Orlenko** <eaglexrlnk@gmail.com> (he/him) + **Alexey Orlenko** \ (he/him) * [bnoordhuis](https://github.com/bnoordhuis) - -**Ben Noordhuis** <info@bnoordhuis.nl> + **Ben Noordhuis** \ * [brendanashworth](https://github.com/brendanashworth) - -**Brendan Ashworth** <brendan.ashworth@me.com> + **Brendan Ashworth** \ * [calvinmetcalf](https://github.com/calvinmetcalf) - -**Calvin Metcalf** <calvin.metcalf@gmail.com> + **Calvin Metcalf** \ * [chrisdickinson](https://github.com/chrisdickinson) - -**Chris Dickinson** <christopher.s.dickinson@gmail.com> + **Chris Dickinson** \ * [claudiorodriguez](https://github.com/claudiorodriguez) - -**Claudio Rodriguez** <cjrodr@yahoo.com> + **Claudio Rodriguez** \ * [DavidCai1993](https://github.com/DavidCai1993) - -**David Cai** <davidcai1993@yahoo.com> (he/him) + **David Cai** \ (he/him) * [digitalinfinity](https://github.com/digitalinfinity) - -**Hitesh Kanwathirtha** <digitalinfinity@gmail.com> (he/him) + **Hitesh Kanwathirtha** \ (he/him) * [eljefedelrodeodeljefe](https://github.com/eljefedelrodeodeljefe) - -**Robert Jefe Lindstaedt** <robert.lindstaedt@gmail.com> + **Robert Jefe Lindstaedt** \ * [estliberitas](https://github.com/estliberitas) - -**Alexander Makarenko** <estliberitas@gmail.com> + **Alexander Makarenko** \ * [firedfox](https://github.com/firedfox) - -**Daniel Wang** <wangyang0123@gmail.com> + **Daniel Wang** \ * [gdams](https://github.com/gdams) - -**George Adams** <george.adams@microsoft.com> (he/him) + **George Adams** \ (he/him) * [gibfahn](https://github.com/gibfahn) - -**Gibson Fahnestock** <gibfahn@gmail.com> (he/him) + **Gibson Fahnestock** \ (he/him) * [glentiki](https://github.com/glentiki) - -**Glen Keane** <glenkeane.94@gmail.com> (he/him) + **Glen Keane** \ (he/him) * [iarna](https://github.com/iarna) - -**Rebecca Turner** <me@re-becca.org> + **Rebecca Turner** \ * [imran-iq](https://github.com/imran-iq) - -**Imran Iqbal** <imran@imraniqbal.org> + **Imran Iqbal** \ * [imyller](https://github.com/imyller) - -**Ilkka Myller** <ilkka.myller@nodefield.com> + **Ilkka Myller** \ * [isaacs](https://github.com/isaacs) - -**Isaac Z. Schlueter** <i@izs.me> + **Isaac Z. Schlueter** \ * [italoacasas](https://github.com/italoacasas) - -**Italo A. Casas** <me@italoacasas.com> (he/him) + **Italo A. 
Casas** \ (he/him) * [jasongin](https://github.com/jasongin) - -**Jason Ginchereau** <jasongin@microsoft.com> + **Jason Ginchereau** \ * [jbergstroem](https://github.com/jbergstroem) - -**Johan Bergström** <bugs@bergstroem.nu> + **Johan Bergström** \ * [jdalton](https://github.com/jdalton) - -**John-David Dalton** <john.david.dalton@gmail.com> + **John-David Dalton** \ * [jhamhader](https://github.com/jhamhader) - -**Yuval Brik** <yuval@brik.org.il> + **Yuval Brik** \ * [joshgav](https://github.com/joshgav) - -**Josh Gavant** <josh.gavant@outlook.com> + **Josh Gavant** \ * [julianduque](https://github.com/julianduque) - -**Julian Duque** <julianduquej@gmail.com> (he/him) + **Julian Duque** \ (he/him) * [kfarnung](https://github.com/kfarnung) - -**Kyle Farnung** <kfarnung@microsoft.com> (he/him) + **Kyle Farnung** \ (he/him) * [kunalspathak](https://github.com/kunalspathak) - -**Kunal Pathak** <kunal.pathak@microsoft.com> + **Kunal Pathak** \ * [lance](https://github.com/lance) - -**Lance Ball** <lball@redhat.com> (he/him) + **Lance Ball** \ (he/him) * [lucamaraschi](https://github.com/lucamaraschi) - -**Luca Maraschi** <luca.maraschi@gmail.com> (he/him) + **Luca Maraschi** \ (he/him) * [lxe](https://github.com/lxe) - -**Aleksey Smolenchuk** <lxe@lxe.co> + **Aleksey Smolenchuk** \ * [maclover7](https://github.com/maclover7) - -**Jon Moss** <me@jonathanmoss.me> (he/him) + **Jon Moss** \ (he/him) * [matthewloring](https://github.com/matthewloring) - -**Matthew Loring** <mattloring@google.com> + **Matthew Loring** \ * [micnic](https://github.com/micnic) - -**Nicu Micleușanu** <micnic90@gmail.com> (he/him) + **Nicu Micleușanu** \ (he/him) * [mikeal](https://github.com/mikeal) - -**Mikeal Rogers** <mikeal.rogers@gmail.com> + **Mikeal Rogers** \ * [monsanto](https://github.com/monsanto) - -**Christopher Monsanto** <chris@monsan.to> + **Christopher Monsanto** \ * [MoonBall](https://github.com/MoonBall) - -**Chen Gang** <gangc.cxy@foxmail.com> + **Chen Gang** \ * [not-an-aardvark](https://github.com/not-an-aardvark) - -**Teddy Katz** <teddy.katz@gmail.com> (he/him) + **Teddy Katz** \ (he/him) * [ofrobots](https://github.com/ofrobots) - -**Ali Ijaz Sheikh** <ofrobots@google.com> (he/him) + **Ali Ijaz Sheikh** \ (he/him) * [Olegas](https://github.com/Olegas) - -**Oleg Elifantiev** <oleg@elifantiev.ru> + **Oleg Elifantiev** \ * [orangemocha](https://github.com/orangemocha) - -**Alexis Campailla** <orangemocha@nodejs.org> + **Alexis Campailla** \ * [othiym23](https://github.com/othiym23) - -**Forrest L Norvell** <ogd@aoaioxxysz.net> (he/him) + **Forrest L Norvell** \ (he/him) * [petkaantonov](https://github.com/petkaantonov) - -**Petka Antonov** <petka_antonov@hotmail.com> + **Petka Antonov** \ * [phillipj](https://github.com/phillipj) - -**Phillip Johnsen** <johphi@gmail.com> + **Phillip Johnsen** \ * [piscisaureus](https://github.com/piscisaureus) - -**Bert Belder** <bertbelder@gmail.com> + **Bert Belder** \ * [pmq20](https://github.com/pmq20) - -**Minqi Pan** <pmq2001@gmail.com> + **Minqi Pan** \ * [princejwesley](https://github.com/princejwesley) - -**Prince John Wesley** <princejohnwesley@gmail.com> + **Prince John Wesley** \ * [psmarshall](https://github.com/psmarshall) - -**Peter Marshall** <petermarshall@chromium.org> (he/him) + **Peter Marshall** \ (he/him) * [rlidwka](https://github.com/rlidwka) - -**Alex Kocharin** <alex@kocharin.ru> + **Alex Kocharin** \ * [rmg](https://github.com/rmg) - -**Ryan Graham** <r.m.graham@gmail.com> + **Ryan Graham** \ * 
[robertkowalski](https://github.com/robertkowalski) - -**Robert Kowalski** <rok@kowalski.gd> + **Robert Kowalski** \ * [romankl](https://github.com/romankl) - -**Roman Klauke** <romaaan.git@gmail.com> + **Roman Klauke** \ * [ronkorving](https://github.com/ronkorving) - -**Ron Korving** <ron@ronkorving.nl> + **Ron Korving** \ * [RReverser](https://github.com/RReverser) - -**Ingvar Stepanyan** <me@rreverser.com> + **Ingvar Stepanyan** \ * [rubys](https://github.com/rubys) - -**Sam Ruby** <rubys@intertwingly.net> + **Sam Ruby** \ * [sam-github](https://github.com/sam-github) - -**Sam Roberts** <vieuxtech@gmail.com> + **Sam Roberts** \ * [sebdeckers](https://github.com/sebdeckers) - -**Sebastiaan Deckers** <sebdeckers83@gmail.com> + **Sebastiaan Deckers** \ * [shigeki](https://github.com/shigeki) - -**Shigeki Ohtsu** <ohtsu@ohtsu.org> (he/him) + **Shigeki Ohtsu** \ (he/him) * [stefanmb](https://github.com/stefanmb) - -**Stefan Budeanu** <stefan@budeanu.com> + **Stefan Budeanu** \ * [tellnes](https://github.com/tellnes) - -**Christian Tellnes** <christian@tellnes.no> + **Christian Tellnes** \ * [thefourtheye](https://github.com/thefourtheye) - -**Sakthipriyan Vairamani** <thechargingvolcano@gmail.com> (he/him) + **Sakthipriyan Vairamani** \ (he/him) * [thlorenz](https://github.com/thlorenz) - -**Thorsten Lorenz** <thlorenz@gmx.de> + **Thorsten Lorenz** \ * [trevnorris](https://github.com/trevnorris) - -**Trevor Norris** <trev.norris@gmail.com> + **Trevor Norris** \ * [tunniclm](https://github.com/tunniclm) - -**Mike Tunnicliffe** <m.j.tunnicliffe@gmail.com> + **Mike Tunnicliffe** \ * [vkurchatkin](https://github.com/vkurchatkin) - -**Vladimir Kurchatkin** <vladimir.kurchatkin@gmail.com> + **Vladimir Kurchatkin** \ * [vsemozhetbyt](https://github.com/vsemozhetbyt) - -**Vse Mozhet Byt** <vsemozhetbyt@gmail.com> (he/him) + **Vse Mozhet Byt** \ (he/him) * [whitlockjc](https://github.com/whitlockjc) - -**Jeremy Whitlock** <jwhitlock@apache.org> + **Jeremy Whitlock** \
      @@ -621,48 +628,48 @@ maintaining the Node.js project. ### Triagers * [Ayase-252](https://github.com/Ayase-252) - -**Qingyu Deng** <i@ayase-lab.com> + **Qingyu Deng** \ * [himadriganguly](https://github.com/himadriganguly) - -**Himadri Ganguly** <himadri.tech@gmail.com> (he/him) + **Himadri Ganguly** \ (he/him) * [iam-frankqiu](https://github.com/iam-frankqiu) - -**Frank Qiu** <iam.frankqiu@gmail.com> (he/him) + **Frank Qiu** \ (he/him) * [marsonya](https://github.com/marsonya) - -**Akhil Marsonya** <akhil.marsonya27@gmail.com> (he/him) + **Akhil Marsonya** \ (he/him) * [Mesteery](https://github.com/Mesteery) - -**Mestery** <mestery@pm.me> + **Mestery** \ * [PoojaDurgad](https://github.com/PoojaDurgad) - -**Pooja Durgad** <Pooja.D.P@ibm.com> + **Pooja Durgad** \ * [RaisinTen](https://github.com/RaisinTen) - -**Darshan Sen** <raisinten@gmail.com> + **Darshan Sen** \ * [VoltrexMaster](https://github.com/VoltrexMaster) - -**Voltrex** <mohammadkeyvanzade94@gmail.com> (he/him) + **Voltrex** \ (he/him) ### Release keys Primary GPG keys for Node.js Releasers (some Releasers sign with subkeys): -* **Beth Griggs** <bgriggs@redhat.com> -`4ED778F539E3634C779C87C6D7062848A1AB005C` -* **Colin Ihrig** <cjihrig@gmail.com> -`94AE36675C464D64BAFA68DD7434390BDBE9B9C5` -* **Danielle Adams** <adamzdanielle@gmail.com> -`74F12602B6F1C4E913FAA37AD3A89613643B6201` -* **James M Snell** <jasnell@keybase.io> -`71DCFD284A79C3B38668286BC97EC7A07EDE3FC1` -* **Michaël Zasso** <targos@protonmail.com> -`8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600` -* **Myles Borins** <myles.borins@gmail.com> -`C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8` -* **Richard Lau** <rlau@redhat.com> -`C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C` -* **Rod Vagg** <rod@vagg.org> -`DD8F2338BAE7501E3DD5AC78C273792F7D83545D` -* **Ruben Bridgewater** <ruben@bridgewater.de> -`A48C2BEE680E841632CD4E44F07496B3EB3C1762` -* **Ruy Adorno** <ruyadorno@hotmail.com> -`108F52B48DB57BB0CC439B2997B01419BD92F80A` -* **Shelley Vohr** <shelley.vohr@gmail.com> -`B9E2F5981AA6E0CD28160D9FF13993A75599653C` +* **Beth Griggs** \ + `4ED778F539E3634C779C87C6D7062848A1AB005C` +* **Colin Ihrig** \ + `94AE36675C464D64BAFA68DD7434390BDBE9B9C5` +* **Danielle Adams** \ + `74F12602B6F1C4E913FAA37AD3A89613643B6201` +* **James M Snell** \ + `71DCFD284A79C3B38668286BC97EC7A07EDE3FC1` +* **Michaël Zasso** \ + `8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600` +* **Myles Borins** \ + `C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8` +* **Richard Lau** \ + `C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C` +* **Rod Vagg** \ + `DD8F2338BAE7501E3DD5AC78C273792F7D83545D` +* **Ruben Bridgewater** \ + `A48C2BEE680E841632CD4E44F07496B3EB3C1762` +* **Ruy Adorno** \ + `108F52B48DB57BB0CC439B2997B01419BD92F80A` +* **Shelley Vohr** \ + `B9E2F5981AA6E0CD28160D9FF13993A75599653C` To import the full set of trusted release keys (including subkeys possibly used to sign releases): @@ -688,24 +695,24 @@ use these keys to verify a downloaded file. Other keys used to sign some previous releases -* **Chris Dickinson** <christopher.s.dickinson@gmail.com> -`9554F04D7259F04124DE6B476D5A82AC7E37093B` -* **Danielle Adams** <adamzdanielle@gmail.com> -`1C050899334244A8AF75E53792EF661D867B9DFA` -* **Evan Lucas** <evanlucas@me.com> -`B9AE9905FFD7803F25714661B63B535A4C206CA9` -* **Gibson Fahnestock** <gibfahn@gmail.com> -`77984A986EBC2AA786BC0F66B01FBB92821C587A` -* **Isaac Z. Schlueter** <i@izs.me> -`93C7E9E91B49E432C2F75674B0A78B0A6C481CF6` -* **Italo A. 
Casas** <me@italoacasas.com> -`56730D5401028683275BD23C23EFEFE93C4CFFFE` -* **Jeremiah Senkpiel** <fishrock@keybase.io> -`FD3A5288F042B6850C66B31F09FE44734EB7990E` -* **Julien Gilli** <jgilli@fastmail.fm> -`114F43EE0176B71C7BC219DD50A3051F888C628D` -* **Timothy J Fontaine** <tjfontaine@gmail.com> -`7937DFD2AB06298B2293C3187D33FF9D0246406D` +* **Chris Dickinson** \ + `9554F04D7259F04124DE6B476D5A82AC7E37093B` +* **Danielle Adams** \ + `1C050899334244A8AF75E53792EF661D867B9DFA` +* **Evan Lucas** \ + `B9AE9905FFD7803F25714661B63B535A4C206CA9` +* **Gibson Fahnestock** \ + `77984A986EBC2AA786BC0F66B01FBB92821C587A` +* **Isaac Z. Schlueter** \ + `93C7E9E91B49E432C2F75674B0A78B0A6C481CF6` +* **Italo A. Casas** \ + `56730D5401028683275BD23C23EFEFE93C4CFFFE` +* **Jeremiah Senkpiel** \ + `FD3A5288F042B6850C66B31F09FE44734EB7990E` +* **Julien Gilli** \ + `114F43EE0176B71C7BC219DD50A3051F888C628D` +* **Timothy J Fontaine** \ + `7937DFD2AB06298B2293C3187D33FF9D0246406D` diff --git a/tools/find-inactive-collaborators.mjs b/tools/find-inactive-collaborators.mjs index 9f7ac239d2df01..5f0173564b69af 100755 --- a/tools/find-inactive-collaborators.mjs +++ b/tools/find-inactive-collaborators.mjs @@ -77,8 +77,8 @@ async function getCollaboratorsFromReadme() { if (line === '### Collaborators') { foundCollaboratorHeading = true; } - if (line.startsWith('**') && isCollaborator) { - const [, name, email] = /^\*\*([^*]+)\*\* <(.+)>/.exec(line); + if (line.startsWith(' **') && isCollaborator) { + const [, name, email] = /^ \*\*([^*]+)\*\* \\<(.+)>/.exec(line); const mailmap = await runGitCommand( `git check-mailmap '${name} <${email}>'` ); @@ -141,8 +141,8 @@ async function moveCollaboratorToEmeritus(peopleToMove) { if (isCollaborator) { if (line.startsWith('* ')) { collaboratorFirstLine = line; - } else if (line.startsWith('**')) { - const [, name, email] = /^\*\*([^*]+)\*\* <(.+)>/.exec(line); + } else if (line.startsWith(' **')) { + const [, name, email] = /^ \*\*([^*]+)\*\* \\<(.+)>/.exec(line); if (peopleToMove.some((entry) => { return entry.name === name && entry.email === email; })) { @@ -158,7 +158,7 @@ async function moveCollaboratorToEmeritus(peopleToMove) { if (isCollaboratorEmeritus) { if (line.startsWith('* ')) { collaboratorFirstLine = line; - } else if (line.startsWith('**')) { + } else if (line.startsWith(' **')) { const currentLine = `${collaboratorFirstLine}\n${line}\n`; // If textToMove is empty, this still works because when undefined is // used in a comparison with <, the result is always false. 
From 6be405bd7bf1d23dd1d6d1b795b83a226f638f4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sun, 5 Sep 2021 17:38:36 +0200 Subject: [PATCH 90/95] test: fix test-dgram-udp6-link-local-address on Windows PR-URL: https://github.com/nodejs/node/pull/40005 Reviewed-By: Rich Trott --- test/parallel/test-dgram-udp6-link-local-address.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/parallel/test-dgram-udp6-link-local-address.js b/test/parallel/test-dgram-udp6-link-local-address.js index bb7307de2430db..5c090acc6b9e11 100644 --- a/test/parallel/test-dgram-udp6-link-local-address.js +++ b/test/parallel/test-dgram-udp6-link-local-address.js @@ -7,6 +7,8 @@ const assert = require('assert'); const dgram = require('dgram'); const os = require('os'); +const { isWindows } = common; + function linklocal() { for (const [ifname, entries] of Object.entries(os.networkInterfaces())) { for (const { address, family, scopeid } of entries) { @@ -21,7 +23,7 @@ const iface = linklocal(); if (!iface) common.skip('cannot find any IPv6 interfaces with a link local address'); -const address = `${iface.address}%${iface.ifname}`; +const address = isWindows ? iface.address : `${iface.address}%${iface.ifname}`; const message = 'Hello, local world!'; // Create a client socket for sending to the link-local address. @@ -42,7 +44,7 @@ server.on('message', common.mustCall((buf, info) => { // including the link local scope identifier. assert.strictEqual( info.address, - common.isWindows ? `${iface.address}%${iface.scopeid}` : address + isWindows ? `${iface.address}%${iface.scopeid}` : address ); server.close(); client.close(); From 0254b4b0d34e38537c5f271e2cb87693ac7c1452 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 18 Sep 2021 16:49:12 +0200 Subject: [PATCH 91/95] doc: fix markdown indentation in lists MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40142 Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Rich Trott Reviewed-By: Darshan Sen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- doc/api/process.md | 2 +- doc/api/tls.md | 2 ++ doc/api/tty.md | 3 +-- doc/api/vm.md | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/api/process.md b/doc/api/process.md index 3243b23da555a3..806bb66685ce67 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -2596,7 +2596,7 @@ tarball. that are no longer supported). * `'Dubnium'` for the 10.x LTS line beginning with 10.13.0. * `'Erbium'` for the 12.x LTS line beginning with 12.13.0. - For other LTS Release code names, see [Node.js Changelog Archive](https://github.com/nodejs/node/blob/HEAD/doc/changelogs/CHANGELOG_ARCHIVE.md) + For other LTS Release code names, see [Node.js Changelog Archive](https://github.com/nodejs/node/blob/HEAD/doc/changelogs/CHANGELOG_ARCHIVE.md) ```js diff --git a/doc/api/tls.md b/doc/api/tls.md index 2b4c6bf111f3da..18819287a349ee 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -1493,6 +1493,7 @@ changes: or `null` to stop the negotiation process. `psk` must be compatible with the selected cipher's digest. `identity` must use UTF-8 encoding. + When negotiating TLS-PSK (pre-shared keys), this function is called with optional identity `hint` provided by the server or `null` in case of TLS 1.3 where `hint` was removed. 
@@ -1928,6 +1929,7 @@ changes: * Returns: {Buffer|TypedArray|DataView} pre-shared key that must either be a buffer or `null` to stop the negotiation process. Returned PSK must be compatible with the selected cipher's digest. + When negotiating TLS-PSK (pre-shared keys), this function is called with the identity provided by the client. If the return value is `null` the negotiation process will stop and an diff --git a/doc/api/tty.md b/doc/api/tty.md index df050c8ae9e96f..2a978c081f7854 100644 --- a/doc/api/tty.md +++ b/doc/api/tty.md @@ -184,8 +184,7 @@ Returns: * `1` for 2, * `4` for 16, * `8` for 256, -* `24` for 16,777,216 -colors supported. +* `24` for 16,777,216 colors supported. Use this to determine what colors the terminal supports. Due to the nature of colors in terminals it is possible to either have false positives or false diff --git a/doc/api/vm.md b/doc/api/vm.md index 49297ada3c65e9..586e1f7bedea6c 100644 --- a/doc/api/vm.md +++ b/doc/api/vm.md @@ -798,7 +798,7 @@ added: * `evaluateCallback` {Function} Called when the module is evaluated. * `options` * `identifier` {string} String used in stack traces. - **Default:** `'vm:module(i)'` where `i` is a context-specific ascending + **Default:** `'vm:module(i)'` where `i` is a context-specific ascending index. * `context` {Object} The [contextified][] object as returned by the `vm.createContext()` method, to compile and evaluate this `Module` in. From 85206b731107a830f9fe5fedefa300740eddcb07 Mon Sep 17 00:00:00 2001 From: npm team Date: Mon, 20 Sep 2021 19:18:58 +0000 Subject: [PATCH 92/95] deps: upgrade npm to 7.24.0 PR-URL: https://github.com/nodejs/node/pull/40167 Reviewed-By: Myles Borins Reviewed-By: Rich Trott --- deps/npm/docs/content/using-npm/scripts.md | 2 +- deps/npm/docs/output/commands/npm-ls.html | 2 +- deps/npm/docs/output/commands/npm.html | 2 +- deps/npm/docs/output/using-npm/scripts.html | 2 +- deps/npm/lib/install.js | 3 +- deps/npm/lib/search/format-package-stream.js | 2 +- deps/npm/lib/utils/config/definitions.js | 12 +- deps/npm/lib/utils/did-you-mean.js | 29 +-- deps/npm/lib/utils/error-message.js | 2 +- deps/npm/lib/view.js | 2 +- deps/npm/man/man1/npm-ls.1 | 2 +- deps/npm/man/man1/npm.1 | 2 +- .../node_modules/init-package-json/LICENSE | 15 -- .../node_modules/init-package-json/LICENSE.md | 18 ++ .../{ => lib}/default-input.js | 177 ++++++++++++------ .../{ => lib}/init-package-json.js | 66 ++++--- .../init-package-json/package.json | 34 ++-- deps/npm/node_modules/minipass/index.js | 14 +- deps/npm/node_modules/minipass/package.json | 4 +- deps/npm/package.json | 4 +- .../tap-snapshots/test/lib/config.js.test.cjs | 4 +- .../test/lib/utils/error-message.js.test.cjs | 34 ++++ .../tap-snapshots/test/lib/view.js.test.cjs | 23 ++- deps/npm/test/lib/search.js | 31 +++ deps/npm/test/lib/utils/config/definitions.js | 30 ++- deps/npm/test/lib/utils/did-you-mean.js | 71 ++++--- deps/npm/test/lib/utils/error-message.js | 11 ++ deps/npm/test/lib/view.js | 12 +- 28 files changed, 429 insertions(+), 181 deletions(-) delete mode 100644 deps/npm/node_modules/init-package-json/LICENSE create mode 100644 deps/npm/node_modules/init-package-json/LICENSE.md rename deps/npm/node_modules/init-package-json/{ => lib}/default-input.js (64%) rename deps/npm/node_modules/init-package-json/{ => lib}/init-package-json.js (79%) diff --git a/deps/npm/docs/content/using-npm/scripts.md b/deps/npm/docs/content/using-npm/scripts.md index 2f2d53c1c2b64b..fba37c28600b3b 100644 --- a/deps/npm/docs/content/using-npm/scripts.md +++ 
b/deps/npm/docs/content/using-npm/scripts.md @@ -259,7 +259,7 @@ package.json file, then your package scripts would have the in your code with `process.env.npm_package_name` and `process.env.npm_package_version`, and so on for other fields. -See [`package-json.md`](/using-npm/package-json) for more on package configs. +See [`package-json.md`](/configuring-npm/package-json) for more on package configs. #### current lifecycle event diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index 252e478513ffce..0ce0ef1e18a431 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -159,7 +159,7 @@

      Description

      the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm’s source tree will show:

      -
      npm@7.23.0 /path/to/npm
      +
      npm@7.24.0 /path/to/npm
       └─┬ init-package-json@0.0.4
         └── promzard@0.1.5
       
      diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index 6670f691d59ac6..0956b0ef0d92a6 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -148,7 +148,7 @@

      Table of contents

      npm <command> [args]
       

      Version

      -

      7.23.0

      +

      7.24.0

      Description

      npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency diff --git a/deps/npm/docs/output/using-npm/scripts.html b/deps/npm/docs/output/using-npm/scripts.html index 6dcf9a0416e4c2..1b4556a1f85dd1 100644 --- a/deps/npm/docs/output/using-npm/scripts.html +++ b/deps/npm/docs/output/using-npm/scripts.html @@ -379,7 +379,7 @@

      package.json vars

      npm_package_version set to “1.2.5”. You can access these variables in your code with process.env.npm_package_name and process.env.npm_package_version, and so on for other fields.

      -

      See package-json.md for more on package configs.

      +

      See package-json.md for more on package configs.

      current lifecycle event

      Lastly, the npm_lifecycle_event environment variable is set to whichever stage of the cycle is being executed. So, you could have a diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js index 1589ff589c38e2..99f75b71384fa6 100644 --- a/deps/npm/lib/install.js +++ b/deps/npm/lib/install.js @@ -135,7 +135,8 @@ class Install extends ArboristWorkspaceCmd { // be very strict about engines when trying to update npm itself const npmInstall = args.find(arg => arg.startsWith('npm@') || arg === 'npm') if (isGlobalInstall && npmInstall) { - const npmManifest = await pacote.manifest(npmInstall) + const npmOptions = this.npm.flatOptions + const npmManifest = await pacote.manifest(npmInstall, npmOptions) try { checks.checkEngine(npmManifest, npmManifest.version, process.version) } catch (e) { diff --git a/deps/npm/lib/search/format-package-stream.js b/deps/npm/lib/search/format-package-stream.js index c88df5eb4be04d..fb7d81856d63f4 100644 --- a/deps/npm/lib/search/format-package-stream.js +++ b/deps/npm/lib/search/format-package-stream.js @@ -42,7 +42,7 @@ class JSONOutputStream extends Minipass { } end () { - super.write(this._didFirst ? ']\n' : '\n]\n') + super.write(this._didFirst ? ']\n' : '\n[]\n') super.end() } } diff --git a/deps/npm/lib/utils/config/definitions.js b/deps/npm/lib/utils/config/definitions.js index 092e0fc435cb4e..009f60a7bce61d 100644 --- a/deps/npm/lib/utils/config/definitions.js +++ b/deps/npm/lib/utils/config/definitions.js @@ -2053,10 +2053,14 @@ define('user-agent', { .replace(/\{workspaces\}/gi, inWorkspaces) .replace(/\{ci\}/gi, ciName ? `ci/${ciName}` : '') .trim() + + // We can't clobber the original or else subsequent flattening will fail + // (i.e. when we change the underlying config values) + // obj[key] = flatOptions.userAgent + // user-agent is a unique kind of config item that gets set from a template // and ends up translated. Because of this, the normal "should we set this // to process.env also doesn't work - obj[key] = flatOptions.userAgent process.env.npm_config_user_agent = flatOptions.userAgent }, }) @@ -2140,6 +2144,9 @@ define('workspace', { a workspace which does not yet exist, to create the folder and set it up as a brand new workspace within the project. `, + flatten: (key, obj, flatOptions) => { + definitions['user-agent'].flatten('user-agent', obj, flatOptions) + }, }) define('workspaces', { @@ -2151,6 +2158,9 @@ define('workspaces', { Enable running a command in the context of **all** the configured workspaces. 
`, + flatten: (key, obj, flatOptions) => { + definitions['user-agent'].flatten('user-agent', obj, flatOptions) + }, }) define('yes', { diff --git a/deps/npm/lib/utils/did-you-mean.js b/deps/npm/lib/utils/did-you-mean.js index 0cfdd035255eb1..c324253af24065 100644 --- a/deps/npm/lib/utils/did-you-mean.js +++ b/deps/npm/lib/utils/did-you-mean.js @@ -3,25 +3,26 @@ const readJson = require('read-package-json-fast') const { cmdList } = require('./cmd-list.js') const didYouMean = async (npm, path, scmd) => { - const bestCmd = cmdList + let best = cmdList .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && scmd !== cmd) .map(str => ` npm ${str} # ${npm.commands[str].description}`) - const pkg = await readJson(`${path}/package.json`) - const { scripts } = pkg // We would already be suggesting this in `npm x` so omit them here const runScripts = ['stop', 'start', 'test', 'restart'] - const bestRun = Object.keys(scripts || {}) - .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && - !runScripts.includes(cmd)) - .map(str => ` npm run ${str} # run the "${str}" package script`) - - const { bin } = pkg - const bestBin = Object.keys(bin || {}) - .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4) - .map(str => ` npm exec ${str} # run the "${str}" command from either this or a remote npm package`) - - const best = [...bestCmd, ...bestRun, ...bestBin] + try { + const { bin, scripts } = await readJson(`${path}/package.json`) + best = best.concat( + Object.keys(scripts || {}) + .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && + !runScripts.includes(cmd)) + .map(str => ` npm run ${str} # run the "${str}" package script`), + Object.keys(bin || {}) + .filter(cmd => distance(scmd, cmd) < scmd.length * 0.4) + .map(str => ` npm exec ${str} # run the "${str}" command from either this or a remote npm package`) + ) + } catch (_) { + // gracefully ignore not being in a folder w/ a package.json + } if (best.length === 0) return '' diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js index 6e12bcb918eef8..9343d37d541495 100644 --- a/deps/npm/lib/utils/error-message.js +++ b/deps/npm/lib/utils/error-message.js @@ -367,7 +367,7 @@ module.exports = (er, npm) => { detail.push(['signal', er.signal]) if (er.cmd && Array.isArray(er.args)) - detail.push(['command', ...[er.cmd, ...er.args]]) + detail.push(['command', ...[er.cmd, ...er.args.map(replaceInfo)]]) if (er.stdout) detail.push(['', er.stdout.trim()]) diff --git a/deps/npm/lib/view.js b/deps/npm/lib/view.js index f4fc5974eeeca7..0124bfb7d35433 100644 --- a/deps/npm/lib/view.js +++ b/deps/npm/lib/view.js @@ -336,7 +336,7 @@ class View extends BaseCommand { email: color.cyan(manifest._npmUser.email), }), modified: !packument.time ? 
undefined - : color.yellow(relativeDate(packument.time[packument.version])), + : color.yellow(relativeDate(packument.time[manifest.version])), maintainers: (packument.maintainers || []).map((u) => unparsePerson({ name: color.yellow(u.name), email: color.cyan(u.email), diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 8ff55b216f2a4b..e9c5d78d82abf1 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -26,7 +26,7 @@ example, running \fBnpm ls promzard\fP in npm's source tree will show: .P .RS 2 .nf -npm@7\.23\.0 /path/to/npm +npm@7\.24\.0 /path/to/npm └─┬ init\-package\-json@0\.0\.4 └── promzard@0\.1\.5 .fi diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index e9e11652387d3e..8e07933b219606 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -10,7 +10,7 @@ npm [args] .RE .SS Version .P -7\.23\.0 +7\.24\.0 .SS Description .P npm is the package manager for the Node JavaScript platform\. It puts diff --git a/deps/npm/node_modules/init-package-json/LICENSE b/deps/npm/node_modules/init-package-json/LICENSE deleted file mode 100644 index 05eeeb88c2ef4c..00000000000000 --- a/deps/npm/node_modules/init-package-json/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/init-package-json/LICENSE.md b/deps/npm/node_modules/init-package-json/LICENSE.md new file mode 100644 index 00000000000000..845be76f64e789 --- /dev/null +++ b/deps/npm/node_modules/init-package-json/LICENSE.md @@ -0,0 +1,18 @@ +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/init-package-json/default-input.js b/deps/npm/node_modules/init-package-json/lib/default-input.js similarity index 64% rename from deps/npm/node_modules/init-package-json/default-input.js rename to deps/npm/node_modules/init-package-json/lib/default-input.js index d1f65841d6c5a3..0003472975760b 100644 --- a/deps/npm/node_modules/init-package-json/default-input.js +++ b/deps/npm/node_modules/init-package-json/lib/default-input.js @@ -1,5 +1,5 @@ +/* eslint-disable no-undef */ var fs = require('fs') -var glob = require('glob') var path = require('path') var validateLicense = require('validate-npm-package-license') var validateName = require('validate-npm-package-name') @@ -15,35 +15,57 @@ function niceName (n) { return n.replace(/^node-|[.-]js$/g, '').replace(/\s+/g, ' ').replace(/ /g, '-').toLowerCase() } -function readDeps (test, excluded) { return function (cb) { - fs.readdir('node_modules', function (er, dir) { - if (er) return cb() - var deps = {} - var n = dir.length - if (n === 0) return cb(null, deps) - dir.forEach(function (d) { - if (d.match(/^\./)) return next() - if (test !== isTestPkg(d) || excluded[d]) - return next() - - var dp = path.join(dirname, 'node_modules', d, 'package.json') - fs.readFile(dp, 'utf8', function (er, p) { - if (er) return next() - try { p = JSON.parse(p) } - catch (e) { return next() } - if (!p.version) return next() - if (p._requiredBy) { - if (!p._requiredBy.some(function (req) { return req === '#USER' })) return next() +function readDeps (test, excluded) { + return function (cb) { + fs.readdir('node_modules', function (er, dir) { + if (er) { + return cb() + } + var deps = {} + var n = dir.length + if (n === 0) { + return cb(null, deps) + } + dir.forEach(function (d) { + if (d.match(/^\./)) { + return next() + } + if (test !== isTestPkg(d) || excluded[d]) { + return next() } - deps[d] = config.get('save-exact') ? p.version : config.get('save-prefix') + p.version - return next() + + var dp = path.join(dirname, 'node_modules', d, 'package.json') + fs.readFile(dp, 'utf8', function (er, p) { + if (er) { + return next() + } + try { + p = JSON.parse(p) + } catch (e) { + return next() + } + if (!p.version) { + return next() + } + if (p._requiredBy) { + if (!p._requiredBy.some(function (req) { + return req === '#USER' + })) { + return next() + } + } + deps[d] = config.get('save-exact') ? p.version : config.get('save-prefix') + p.version + return next() + }) }) + function next () { + if (--n === 0) { + return cb(null, deps) + } + } }) - function next () { - if (--n === 0) return cb(null, deps) - } - }) -}} + } +} var name = niceName(package.name || basename) var spec @@ -54,16 +76,20 @@ try { } var scope = config.get('scope') if (scope) { - if (scope.charAt(0) !== '@') scope = '@' + scope + if (scope.charAt(0) !== '@') { + scope = '@' + scope + } if (spec.scope) { name = scope + '/' + spec.name.split('/')[1] } else { name = scope + '/' + name } } -exports.name = yes ? name : prompt('package name', name, function (data) { +exports.name = yes ? name : prompt('package name', name, function (data) { var its = validateName(data) - if (its.validForNewPackages) return data + if (its.validForNewPackages) { + return data + } var errors = (its.errors || []).concat(its.warnings || []) var er = new Error('Sorry, ' + errors.join(' and ') + '.') er.notValid = true @@ -83,7 +109,9 @@ var version = package.version || exports.version = yes ? 
version : prompt('version', version, function (version) { - if (semver.valid(version)) return version + if (semver.valid(version)) { + return version + } var er = new Error('Invalid version: "' + version + '"') er.notValid = true return er @@ -96,22 +124,25 @@ if (!package.description) { if (!package.main) { exports.main = function (cb) { fs.readdir(dirname, function (er, f) { - if (er) f = [] + if (er) { + f = [] + } f = f.filter(function (f) { return f.match(/\.js$/) }) - if (f.indexOf('index.js') !== -1) + if (f.indexOf('index.js') !== -1) { f = 'index.js' - else if (f.indexOf('main.js') !== -1) + } else if (f.indexOf('main.js') !== -1) { f = 'main.js' - else if (f.indexOf(basename + '.js') !== -1) + } else if (f.indexOf(basename + '.js') !== -1) { f = basename + '.js' - else + } else { f = f[0] + } - var index = f || 'index.js' + var index = f || 'index.js' return cb(null, yes ? index : prompt('entry point', index)) }) } @@ -121,18 +152,24 @@ if (!package.bin) { exports.bin = function (cb) { fs.readdir(path.resolve(dirname, 'bin'), function (er, d) { // no bins - if (er) return cb() + if (er) { + return cb() + } // just take the first js file we find there, or nada - return cb(null, d.filter(function (f) { - return f.match(/\.js$/) - })[0]) + let r = d.find(f => f.match(/\.js$/)) + if (r) { + r = `bin/${r}` + } + return cb(null, r) }) } } exports.directories = function (cb) { fs.readdir(dirname, function (er, dirs) { - if (er) return cb(er) + if (er) { + return cb(er) + } var res = {} dirs.forEach(function (d) { switch (d) { @@ -143,7 +180,9 @@ exports.directories = function (cb) { case 'lib': return res.lib = d } }) - if (Object.keys(res).length === 0) res = undefined + if (Object.keys(res).length === 0) { + res = undefined + } return cb(null, res) }) } @@ -173,13 +212,15 @@ function setupScripts (d, cb) { } if (!s.test || s.test === notest) { var commands = { - 'tap':'tap test/*.js' - , 'expresso':'expresso test' - , 'mocha':'mocha' + tap: 'tap test/*.js', + expresso: 'expresso test', + mocha: 'mocha', } var command Object.keys(commands).forEach(function (k) { - if (d.indexOf(k) !== -1) command = commands[k] + if (d.indexOf(k) !== -1) { + command = commands[k] + } }) var ps = 'test command' if (yes) { @@ -201,12 +242,18 @@ if (!package.repository) { var i = gconf.indexOf('[remote "origin"]') if (i !== -1) { var u = gconf[i + 1] - if (!u.match(/^\s*url =/)) u = gconf[i + 2] - if (!u.match(/^\s*url =/)) u = null - else u = u.replace(/^\s*url = /, '') + if (!u.match(/^\s*url =/)) { + u = gconf[i + 2] + } + if (!u.match(/^\s*url =/)) { + u = null + } else { + u = u.replace(/^\s*url = /, '') + } } - if (u && u.match(/^git@github.com:/)) + if (u && u.match(/^git@github.com:/)) { u = u.replace(/^git@github.com:/, 'https://github.com/') + } return cb(null, yes ? u : prompt('git repository', u)) }) @@ -215,9 +262,15 @@ if (!package.repository) { if (!package.keywords) { exports.keywords = yes ? '' : prompt('keywords', function (s) { - if (!s) return undefined - if (Array.isArray(s)) s = s.join(' ') - if (typeof s !== 'string') return s + if (!s) { + return undefined + } + if (Array.isArray(s)) { + s = s.join(' ') + } + if (typeof s !== 'string') { + return s + } return s.split(/[\s,]+/) }) } @@ -225,15 +278,15 @@ if (!package.keywords) { if (!package.author) { exports.author = config.get('init.author.name') || config.get('init-author-name') - ? { - "name" : config.get('init.author.name') || + ? 
{ + name: config.get('init.author.name') || config.get('init-author-name'), - "email" : config.get('init.author.email') || + email: config.get('init.author.email') || config.get('init-author-email'), - "url" : config.get('init.author.url') || - config.get('init-author-url') + url: config.get('init.author.url') || + config.get('init-author-url'), } - : yes ? '' : prompt('author') + : yes ? '' : prompt('author') } const defaultDottedInitLicense = config && @@ -248,7 +301,9 @@ var license = package.license || 'ISC' exports.license = yes ? license : prompt('license', license, function (data) { var its = validateLicense(data) - if (its.validForNewPackages) return data + if (its.validForNewPackages) { + return data + } var errors = (its.errors || []).concat(its.warnings || []) var er = new Error('Sorry, ' + errors.join(' and ') + '.') er.notValid = true diff --git a/deps/npm/node_modules/init-package-json/init-package-json.js b/deps/npm/node_modules/init-package-json/lib/init-package-json.js similarity index 79% rename from deps/npm/node_modules/init-package-json/init-package-json.js rename to deps/npm/node_modules/init-package-json/lib/init-package-json.js index 83e7342d0aa4f4..bee79351caab3f 100644 --- a/deps/npm/node_modules/init-package-json/init-package-json.js +++ b/deps/npm/node_modules/init-package-json/lib/init-package-json.js @@ -12,7 +12,6 @@ var read = require('read') // to validate the data object at the end as a worthwhile package // and assign default values for things. -// readJson.extras(file, data, cb) var readJson = require('read-package-json') function yes (conf) { @@ -23,8 +22,10 @@ function yes (conf) { } function init (dir, input, config, cb) { - if (typeof config === 'function') - cb = config, config = {} + if (typeof config === 'function') { + cb = config + config = {} + } // accept either a plain-jane object, or a config object // with a "get" method. @@ -36,7 +37,7 @@ function init (dir, input, config, cb) { }, toJSON: function () { return data - } + }, } } @@ -52,14 +53,18 @@ function init (dir, input, config, cb) { readJson(packageFile, function (er, d) { readJson.extraSet = es - if (er) pkg = {} - else pkg = d + if (er) { + pkg = {} + } else { + pkg = d + } ctx.filename = packageFile ctx.dirname = path.dirname(packageFile) ctx.basename = path.basename(ctx.dirname) - if (!pkg.version || !semver.valid(pkg.version)) + if (!pkg.version || !semver.valid(pkg.version)) { delete pkg.version + } ctx.package = pkg ctx.config = config || {} @@ -71,7 +76,9 @@ function init (dir, input, config, cb) { pz.on('error', cb) pz.on('data', function (data) { Object.keys(data).forEach(function (k) { - if (data[k] !== undefined && data[k] !== null) pkg[k] = data[k] + if (data[k] !== undefined && data[k] !== null) { + pkg[k] = data[k] + } }) // only do a few of these. @@ -81,8 +88,10 @@ function init (dir, input, config, cb) { return fn.name !== 'authors' && fn.name !== 'mans' }) readJson.extras(packageFile, pkg, function (er, pkg) { + if (er) { + return cb(er, pkg) + } readJson.extraSet = es - if (er) return cb(er, pkg) pkg = unParsePeople(pkg) // no need for the readme now. delete pkg.readme @@ -95,13 +104,15 @@ function init (dir, input, config, cb) { delete pkg.gitHead // if the repo is empty, remove it. 
- if (!pkg.repository) + if (!pkg.repository) { delete pkg.repository + } // readJson filters out empty descriptions, but init-package-json // traditionally leaves them alone - if (!pkg.description) + if (!pkg.description) { pkg.description = data.description + } var d = JSON.stringify(updateDeps(pkg), null, 2) + '\n' function write (yes) { @@ -116,7 +127,7 @@ function init (dir, input, config, cb) { return write(true) } console.log('About to write to %s:\n\n%s\n', packageFile, d) - read({prompt:'Is this OK? ', default: 'yes'}, function (er, ok) { + read({prompt: 'Is this OK? ', default: 'yes'}, function (er, ok) { if (er) { return cb(er) } @@ -129,18 +140,19 @@ function init (dir, input, config, cb) { }) }) }) - } -function updateDeps(depsData) { +function updateDeps (depsData) { // optionalDependencies don't need to be repeated in two places if (depsData.dependencies) { if (depsData.optionalDependencies) { - for (const name of Object.keys(depsData.optionalDependencies)) + for (const name of Object.keys(depsData.optionalDependencies)) { delete depsData.dependencies[name] + } } - if (Object.keys(depsData.dependencies).length === 0) + if (Object.keys(depsData.dependencies).length === 0) { delete depsData.dependencies + } } return depsData @@ -148,21 +160,25 @@ function updateDeps(depsData) { // turn the objects into somewhat more humane strings. function unParsePeople (data) { - if (data.author) data.author = unParsePerson(data.author) - ;["maintainers", "contributors"].forEach(function (set) { - if (!Array.isArray(data[set])) return; + if (data.author) { + data.author = unParsePerson(data.author) + }['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } data[set] = data[set].map(unParsePerson) }) return data } function unParsePerson (person) { - if (typeof person === "string") return person - var name = person.name || "" + if (typeof person === 'string') { + return person + } + var name = person.name || '' var u = person.url || person.web - var url = u ? (" ("+u+")") : "" + var url = u ? (' (' + u + ')') : '' var e = person.email || person.mail - var email = e ? (" <"+e+">") : "" - return name+email+url + var email = e ? (' <' + e + '>') : '' + return name + email + url } - diff --git a/deps/npm/node_modules/init-package-json/package.json b/deps/npm/node_modules/init-package-json/package.json index 0e07f48f49746d..6d642f6cf6879f 100644 --- a/deps/npm/node_modules/init-package-json/package.json +++ b/deps/npm/node_modules/init-package-json/package.json @@ -1,41 +1,46 @@ { "name": "init-package-json", - "version": "2.0.4", - "main": "init-package-json.js", + "version": "2.0.5", + "main": "lib/init-package-json.js", "scripts": { "test": "tap", "preversion": "npm test", "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" + "prepublishOnly": "git push origin --follow-tags", + "lint": "eslint '**/*.js'", + "postlint": "npm-template-check", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" }, "repository": { "type": "git", "url": "https://github.com/npm/init-package-json.git" }, - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "author": "GitHub Inc.", "license": "ISC", "description": "A node module to get your node module started", "dependencies": { - "glob": "^7.1.1", - "npm-package-arg": "^8.1.2", + "npm-package-arg": "^8.1.5", "promzard": "^0.3.0", "read": "~1.0.1", - "read-package-json": "^4.0.0", + "read-package-json": "^4.1.1", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" }, "devDependencies": { "@npmcli/config": "^2.1.0", - "mkdirp": "^1.0.4", - "rimraf": "^3.0.2", - "tap": "^14.11.0" + "@npmcli/template-oss": "^1.0.3", + "tap": "^15.0.9" }, "engines": { "node": ">=10" }, "tap": { - "jobs": "1" + "statements": "94", + "branches": "83", + "lines": "94" }, "keywords": [ "init", @@ -48,7 +53,8 @@ "start" ], "files": [ - "default-input.js", - "init-package-json.js" - ] + "bin", + "lib" + ], + "templateVersion": "1.0.3" } diff --git a/deps/npm/node_modules/minipass/index.js b/deps/npm/node_modules/minipass/index.js index 56cbd665d2526d..ae134a066d77f0 100644 --- a/deps/npm/node_modules/minipass/index.js +++ b/deps/npm/node_modules/minipass/index.js @@ -1,4 +1,8 @@ 'use strict' +const proc = typeof process === 'object' && process ? process : { + stdout: null, + stderr: null, +} const EE = require('events') const Stream = require('stream') const Yallist = require('yallist') @@ -8,6 +12,7 @@ const EOF = Symbol('EOF') const MAYBE_EMIT_END = Symbol('maybeEmitEnd') const EMITTED_END = Symbol('emittedEnd') const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') const CLOSED = Symbol('closed') const READ = Symbol('read') const FLUSH = Symbol('flush') @@ -66,6 +71,7 @@ module.exports = class Minipass extends Stream { this[EMITTED_END] = false this[EMITTING_END] = false this[CLOSED] = false + this[EMITTED_ERROR] = null this.writable = true this.readable = true this[BUFFERLENGTH] = 0 @@ -310,7 +316,7 @@ module.exports = class Minipass extends Stream { const ended = this[EMITTED_END] opts = opts || {} - if (dest === process.stdout || dest === process.stderr) + if (dest === proc.stdout || dest === proc.stderr) opts.end = false else opts.end = opts.end !== false @@ -339,6 +345,8 @@ module.exports = class Minipass extends Stream { else if (isEndish(ev) && this[EMITTED_END]) { super.emit(ev) this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + fn.call(this, this[EMITTED_ERROR]) } } } @@ -400,6 +408,8 @@ module.exports = class Minipass extends Stream { // don't emit close before 'end' and 'finish' if (!this[EMITTED_END] && !this[DESTROYED]) return + } else if (ev === 'error') { + this[EMITTED_ERROR] = data } // TODO: replace with a spread operator when Node v4 support drops @@ -452,8 +462,8 @@ module.exports = class Minipass extends Stream { promise () { return new Promise((resolve, reject) => { this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('end', () => resolve()) this.on('error', er => reject(er)) + this.on('end', () => resolve()) }) } diff --git a/deps/npm/node_modules/minipass/package.json b/deps/npm/node_modules/minipass/package.json index 54f62d56d46d86..165fa662ab4a7c 100644 --- a/deps/npm/node_modules/minipass/package.json +++ b/deps/npm/node_modules/minipass/package.json @@ -1,6 +1,6 @@ { "name": "minipass", - "version": "3.1.3", + "version": "3.1.5", "description": "minimal implementation of a PassThrough stream", "main": "index.js", "dependencies": { @@ -8,7 +8,7 @@ }, "devDependencies": { "end-of-stream": "^1.4.0", - "tap": "^14.6.5", + 
"tap": "^15.0.9", "through2": "^2.0.3" }, "scripts": { diff --git a/deps/npm/package.json b/deps/npm/package.json index 20b80c7ebe21c7..38b45947706dc7 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "7.23.0", + "version": "7.24.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -74,7 +74,7 @@ "graceful-fs": "^4.2.8", "hosted-git-info": "^4.0.2", "ini": "^2.0.0", - "init-package-json": "^2.0.4", + "init-package-json": "^2.0.5", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^2.3.1", "libnpmaccess": "^4.0.2", diff --git a/deps/npm/tap-snapshots/test/lib/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/config.js.test.cjs index 8f349a6f54249e..dab7ef55f64e82 100644 --- a/deps/npm/tap-snapshots/test/lib/config.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/config.js.test.cjs @@ -146,7 +146,7 @@ exports[`test/lib/config.js TAP config list --json > output matches snapshot 1`] "unicode": false, "update-notifier": true, "usage": false, - "user-agent": "npm/{NPM-VERSION} node/{NODE-VERSION} {PLATFORM} {ARCH} workspaces/false", + "user-agent": "npm/{npm-version} node/{node-version} {platform} {arch} workspaces/{workspaces} {ci}", "version": false, "versions": false, "viewer": "{VIEWER}", @@ -296,7 +296,7 @@ umask = 0 unicode = false update-notifier = true usage = false -user-agent = "npm/{NPM-VERSION} node/{NODE-VERSION} {PLATFORM} {ARCH} workspaces/false" +user-agent = "npm/{npm-version} node/{node-version} {platform} {arch} workspaces/{workspaces} {ci}" ; userconfig = "{HOME}/.npmrc" ; overridden by cli version = false versions = false diff --git a/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs index 4eb5ea3bc5df59..c963ca2040e6f5 100644 --- a/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/utils/error-message.js.test.cjs @@ -180,6 +180,40 @@ Object { } ` +exports[`test/lib/utils/error-message.js TAP args are cleaned > must match snapshot 1`] = ` +Object { + "detail": Array [ + Array [ + "signal", + "SIGYOLO", + ], + Array [ + "command", + "some command", + "a", + "r", + "g", + "s", + "https://evil:***@npmjs.org", + ], + Array [ + "", + "stdout", + ], + Array [ + "", + "stderr", + ], + ], + "summary": Array [ + Array [ + "", + "cmd err", + ], + ], +} +` + exports[`test/lib/utils/error-message.js TAP bad engine with config loaded > must match snapshot 1`] = ` Object { "detail": Array [ diff --git a/deps/npm/tap-snapshots/test/lib/view.js.test.cjs b/deps/npm/tap-snapshots/test/lib/view.js.test.cjs index 27ba7b1eb69276..9ed8334138cf85 100644 --- a/deps/npm/tap-snapshots/test/lib/view.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/view.js.test.cjs @@ -82,7 +82,7 @@ dist dist-tags: latest: 1.0.0 -published {TIME} ago +published yesterday ` exports[`test/lib/view.js TAP should log info of package in current working dir specific version > must match snapshot 1`] = ` @@ -99,7 +99,7 @@ dist dist-tags: latest: 1.0.0 -published {TIME} ago +published yesterday ` exports[`test/lib/view.js TAP should log package info package from git > must match snapshot 1`] = ` @@ -302,7 +302,24 @@ dist dist-tags: latest: 1.0.0 -published {TIME} ago +published yesterday +` + +exports[`test/lib/view.js TAP should log package info package with semver range > must match snapshot 1`] = ` + + +blue@1.0.0 | Proprietary | deps: none | versions: 2 + +dist +.tarball:http://hm.blue.com/1.0.0.tgz 
+.shasum:123 +.integrity:--- +.unpackedSize:1 B + +dist-tags: +latest: 1.0.0 + +published yesterday ` exports[`test/lib/view.js TAP workspaces all workspaces --json > must match snapshot 1`] = ` diff --git a/deps/npm/test/lib/search.js b/deps/npm/test/lib/search.js index 510a470f48088e..55b584b8aa7dc5 100644 --- a/deps/npm/test/lib/search.js +++ b/deps/npm/test/lib/search.js @@ -130,6 +130,37 @@ t.test('search --json', (t) => { src.end() }) +t.test('search --json', (t) => { + const src = new Minipass() + src.objectMode = true + + npm.flatOptions.json = true + config.json = true + const libnpmsearch = { + stream () { + return src + }, + } + + const Search = t.mock('../../lib/search.js', { + ...mocks, + libnpmsearch, + }) + const search = new Search(npm) + + search.exec(['foo'], (err) => { + if (err) + throw err + + t.equal(result, '\n[]\n', 'should have expected empty square brackets') + + config.json = false + t.end() + }) + + src.end() +}) + t.test('search --searchexclude --searchopts', t => { npm.flatOptions.search = { ...flatOptions.search, diff --git a/deps/npm/test/lib/utils/config/definitions.js b/deps/npm/test/lib/utils/config/definitions.js index 65193020d050c5..88993303b539cb 100644 --- a/deps/npm/test/lib/utils/config/definitions.js +++ b/deps/npm/test/lib/utils/config/definitions.js @@ -747,7 +747,7 @@ t.test('user-agent', t => { definitions['user-agent'].flatten('user-agent', obj, flat) t.equal(flat.userAgent, expectNoCI) t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') - t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + t.not(obj['user-agent'], flat.userAgent, 'config user-agent template is not translated') obj['ci-name'] = 'foo' obj['user-agent'] = definitions['user-agent'].default @@ -755,7 +755,7 @@ t.test('user-agent', t => { definitions['user-agent'].flatten('user-agent', obj, flat) t.equal(flat.userAgent, expectCI) t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') - t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + t.not(obj['user-agent'], flat.userAgent, 'config user-agent template is not translated') delete obj['ci-name'] obj.workspaces = true @@ -764,7 +764,7 @@ t.test('user-agent', t => { definitions['user-agent'].flatten('user-agent', obj, flat) t.equal(flat.userAgent, expectWorkspaces) t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') - t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + t.not(obj['user-agent'], flat.userAgent, 'config user-agent template is not translated') delete obj.workspaces obj.workspace = ['foo'] @@ -772,7 +772,7 @@ t.test('user-agent', t => { definitions['user-agent'].flatten('user-agent', obj, flat) t.equal(flat.userAgent, expectWorkspaces) t.equal(process.env.npm_config_user_agent, flat.userAgent, 'npm_user_config environment is set') - t.equal(obj['user-agent'], flat.userAgent, 'config user-agent template is translated') + t.not(obj['user-agent'], flat.userAgent, 'config user-agent template is not translated') t.end() }) @@ -853,3 +853,25 @@ t.test('package-lock-only', t => { t.strictSame(flat, { packageLock: false, packageLockOnly: false }) t.end() }) + +t.test('workspaces', t => { + const obj = { + workspaces: true, + 'user-agent': definitions['user-agent'].default, + } + const flat = {} + definitions.workspaces.flatten('workspaces', obj, flat) + t.match(flat.userAgent, 
/workspaces\/true/) + t.end() +}) + +t.test('workspace', t => { + const obj = { + workspace: ['workspace-a'], + 'user-agent': definitions['user-agent'].default, + } + const flat = {} + definitions.workspace.flatten('workspaces', obj, flat) + t.match(flat.userAgent, /workspaces\/true/) + t.end() +}) diff --git a/deps/npm/test/lib/utils/did-you-mean.js b/deps/npm/test/lib/utils/did-you-mean.js index 15712b665be6eb..1285d5300853bc 100644 --- a/deps/npm/test/lib/utils/did-you-mean.js +++ b/deps/npm/test/lib/utils/did-you-mean.js @@ -5,34 +5,55 @@ const dym = require('../../../lib/utils/did-you-mean.js') t.test('did-you-mean', t => { npm.load(err => { t.notOk(err) - t.test('nistall', async t => { - const result = await dym(npm, npm.localPrefix, 'nistall') - t.match(result, 'npm install') - }) - t.test('sttest', async t => { - const result = await dym(npm, npm.localPrefix, 'sttest') - t.match(result, 'npm test') - t.match(result, 'npm run posttest') + t.test('with package.json', t => { + const testdir = t.testdir({ + 'package.json': JSON.stringify({ + bin: { + npx: 'exists', + }, + scripts: { + install: 'exists', + posttest: 'exists', + }, + }), + }) + t.test('nistall', async t => { + const result = await dym(npm, testdir, 'nistall') + t.match(result, 'npm install') + }) + t.test('sttest', async t => { + const result = await dym(npm, testdir, 'sttest') + t.match(result, 'npm test') + t.match(result, 'npm run posttest') + }) + t.test('npz', async t => { + const result = await dym(npm, testdir, 'npxx') + t.match(result, 'npm exec npx') + }) + t.test('qwuijbo', async t => { + const result = await dym(npm, testdir, 'qwuijbo') + t.match(result, '') + }) + t.end() }) - t.test('npz', async t => { - const result = await dym(npm, npm.localPrefix, 'npxx') - t.match(result, 'npm exec npx') + t.test('with no package.json', t => { + const testdir = t.testdir({}) + t.test('nistall', async t => { + const result = await dym(npm, testdir, 'nistall') + t.match(result, 'npm install') + }) + t.end() }) - t.test('qwuijbo', async t => { - const result = await dym(npm, npm.localPrefix, 'qwuijbo') - t.match(result, '') + t.test('missing bin and script properties', async t => { + const testdir = t.testdir({ + 'package.json': JSON.stringify({ + name: 'missing-bin', + }), + }) + + const result = await dym(npm, testdir, 'nistall') + t.match(result, 'npm install') }) t.end() }) }) - -t.test('missing bin and script properties', async t => { - const path = t.testdir({ - 'package.json': JSON.stringify({ - name: 'missing-bin', - }), - }) - - const result = await dym(npm, path, 'nistall') - t.match(result, 'npm install') -}) diff --git a/deps/npm/test/lib/utils/error-message.js b/deps/npm/test/lib/utils/error-message.js index d1c67a95137c44..6b2b5c9222e77a 100644 --- a/deps/npm/test/lib/utils/error-message.js +++ b/deps/npm/test/lib/utils/error-message.js @@ -201,6 +201,17 @@ t.test('default message', t => { t.end() }) +t.test('args are cleaned', t => { + t.matchSnapshot(errorMessage(Object.assign(new Error('cmd err'), { + cmd: 'some command', + signal: 'SIGYOLO', + args: ['a', 'r', 'g', 's', 'https://evil:password@npmjs.org'], + stdout: 'stdout', + stderr: 'stderr', + }), npm)) + t.end() +}) + t.test('eacces/eperm', t => { const runTest = (windows, loaded, cachePath, cacheDest) => t => { if (windows) diff --git a/deps/npm/test/lib/view.js b/deps/npm/test/lib/view.js index 793917adc6476d..096ababb29ae83 100644 --- a/deps/npm/test/lib/view.js +++ b/deps/npm/test/lib/view.js @@ -17,6 +17,9 @@ const cleanLogs = () => { console.log 
= fn } +// 25 hours ago +const yesterday = new Date(Date.now() - 1000 * 60 * 60 * 25) + const packument = (nv, opts) => { if (!opts.fullMetadata) throw new Error('must fetch fullMetadata') @@ -40,7 +43,7 @@ const packument = (nv, opts) => { latest: '1.0.0', }, time: { - '1.0.0': '2019-08-06T16:21:09.842Z', + '1.0.0': yesterday, }, versions: { '1.0.0': { @@ -332,6 +335,13 @@ t.test('should log package info', t => { }) }) + t.test('package with semver range', t => { + view.exec(['blue@^1.0.0'], () => { + t.matchSnapshot(logs) + t.end() + }) + }) + t.test('package with no modified time', t => { viewUnicode.exec(['cyan@1.0.0'], () => { t.matchSnapshot(logs) From e4825dcfd5f1a347d68f716127f5c442aa083698 Mon Sep 17 00:00:00 2001 From: RISHABH BUDHIRAJA Date: Wed, 14 Jul 2021 23:25:49 +0530 Subject: [PATCH 93/95] doc: changes default values for fs.read fns PR-URL: https://github.com/nodejs/node/pull/39163 Fixes: https://github.com/nodejs/node/issues/39034 Reviewed-By: Nitzan Uziely Reviewed-By: Qingyu Deng --- doc/api/fs.md | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index ab33a4009730e0..1d1574a5a6e393 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -258,9 +258,7 @@ added: v10.0.0 * `buffer` {Buffer|TypedArray|DataView} A buffer that will be filled with the file data read. * `offset` {integer} The location in the buffer at which to start filling. - **Default:** `0` -* `length` {integer} The number of bytes to read. **Default:** - `buffer.byteLength` +* `length` {integer} The number of bytes to read. * `position` {integer} The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an integer, the current @@ -2836,11 +2834,9 @@ changes: * `fd` {integer} * `buffer` {Buffer|TypedArray|DataView} The buffer that the data will be - written to. **Default:** `Buffer.alloc(16384)` -* `offset` {integer} The position in `buffer` to write the data to. **Default:** - `0` -* `length` {integer} The number of bytes to read. **Default:** - `buffer.byteLength` + written to. +* `offset` {integer} The position in `buffer` to write the data to. +* `length` {integer} The number of bytes to read. * `position` {integer|bigint} Specifies where to begin reading from in the file. If `position` is `null` or `-1 `, data will be read from the current file position, and the file position will be updated. 
If `position` is an From 4444b5c938cad7356cff39f93eeb8693965a36fd Mon Sep 17 00:00:00 2001 From: "Node.js GitHub Bot" Date: Sun, 19 Sep 2021 00:13:27 +0000 Subject: [PATCH 94/95] meta: update AUTHORS MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/40148 Reviewed-By: Rich Trott Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- AUTHORS | 3 +++ 1 file changed, 3 insertions(+) diff --git a/AUTHORS b/AUTHORS index 602727b0a389e3..fc25b4270f00ed 100644 --- a/AUTHORS +++ b/AUTHORS @@ -3316,5 +3316,8 @@ Christian Boehlke null <60427892+vierofernando@users.noreply.github.com> Dominic Elm treysis +shfshanyue +Nikita Galkin +vipul kumar # Generated by tools/update-authors.js From aff2a0a4754ddba26e161bc66cd4392fc28dd146 Mon Sep 17 00:00:00 2001 From: Beth Griggs Date: Wed, 22 Sep 2021 00:43:16 +0100 Subject: [PATCH 95/95] 2021-09-22, Version 16.10.0 (Current) Notable changes: crypto: * (SEMVER-MINOR) add rsa-pss keygen parameters (Filip Skokan) https://github.com/nodejs/node/pull/39927 doc: * add Ayase-252 to collaborators (Qingyu Deng) https://github.com/nodejs/node/pull/40078 fs: * (SEMVER-MINOR) make `open` and `close` stream override optional when unused (Antoine du Hamel) https://github.com/nodejs/node/pull/40013 http: * (SEMVER-MINOR) limit requests per connection (Artur K) https://github.com/nodejs/node/pull/40082 src: * (SEMVER-MINOR) add --no-global-search-paths cli option (Cheng Zhao) https://github.com/nodejs/node/pull/39754 * (SEMVER-MINOR) add option to disable global search paths (Cheng Zhao) https://github.com/nodejs/node/pull/39754 * (SEMVER-MINOR) make napi_create_reference accept symbol (JckXia) https://github.com/nodejs/node/pull/39926 stream: * (SEMVER-MINOR) add signal support to pipeline generators (Robert Nagy) https://github.com/nodejs/node/pull/39067 PR-URL: https://github.com/nodejs/node/pull/40175 --- CHANGELOG.md | 3 +- doc/api/cli.md | 4 +- doc/api/crypto.md | 4 +- doc/api/deprecations.md | 4 +- doc/api/errors.md | 2 +- doc/api/fs.md | 8 +-- doc/api/http.md | 2 +- doc/changelogs/CHANGELOG_V16.md | 115 ++++++++++++++++++++++++++++++++ src/node_version.h | 6 +- 9 files changed, 132 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c265902e7e281..d8b6e5e47c06d8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,8 @@ release. -16.9.1
      +16.10.0
      +16.9.1
      16.9.0
      16.8.0
      16.7.0
      diff --git a/doc/api/cli.md b/doc/api/cli.md index f6e9e6cfcaf63a..f7e66a38baf571 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -597,7 +597,7 @@ This option is a no-op. It is kept for compatibility. ### `--no-addons` Disable the `node-addons` exports condition as well as disable loading @@ -621,7 +621,7 @@ dynamically when `async_hooks` is enabled. ### `--no-global-search-paths` Do not search modules from global paths like `$HOME/.node_modules` and diff --git a/doc/api/crypto.md b/doc/api/crypto.md index a095ece29a9ace..5974757e8bcf1e 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -3375,7 +3375,7 @@ generateKey('hmac', { length: 64 }, (err, key) => { @@ -2815,7 +2815,7 @@ and `'mgf1HashAlgorithm'`. ### DEP0155: Trailing slashes in pattern specifier resolutions Loading native addons has been disabled using [`--no-addons`][]. diff --git a/doc/api/fs.md b/doc/api/fs.md index 1d1574a5a6e393..8dfdf2dd66021a 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -1885,10 +1885,10 @@ behavior is similar to `cp dir1/ dir2/`. * {number} Requests per socket. **Default:** null (no limit) diff --git a/doc/changelogs/CHANGELOG_V16.md b/doc/changelogs/CHANGELOG_V16.md index 128467175d01df..2fb536441320ee 100644 --- a/doc/changelogs/CHANGELOG_V16.md +++ b/doc/changelogs/CHANGELOG_V16.md @@ -10,6 +10,7 @@ +16.10.0
      16.9.1
      16.9.0
      16.8.0
      @@ -47,6 +48,120 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2021-09-22, Version 16.10.0 (Current), @BethGriggs + +### Notable Changes + +* [[`fb226ff2ee`](https://github.com/nodejs/node/commit/fb226ff2ee)] - **(SEMVER-MINOR)** **crypto**: add rsa-pss keygen parameters (Filip Skokan) [#39927](https://github.com/nodejs/node/pull/39927) +* [[`85206b7311`](https://github.com/nodejs/node/commit/85206b7311)] - **deps**: upgrade npm to 7.24.0 (npm team) [#40167](https://github.com/nodejs/node/pull/40167) +* [[`98f56d179c`](https://github.com/nodejs/node/commit/98f56d179c)] - **deps**: update Acorn to v8.5.0 (Michaël Zasso) [#40015](https://github.com/nodejs/node/pull/40015) +* [[`9655329772`](https://github.com/nodejs/node/commit/9655329772)] - **doc**: add Ayase-252 to collaborators (Qingyu Deng) [#40078](https://github.com/nodejs/node/pull/40078) +* [[`59fff925be`](https://github.com/nodejs/node/commit/59fff925be)] - **(SEMVER-MINOR)** **fs**: make `open` and `close` stream override optional when unused (Antoine du Hamel) [#40013](https://github.com/nodejs/node/pull/40013) +* [[`a63a4bce90`](https://github.com/nodejs/node/commit/a63a4bce90)] - **(SEMVER-MINOR)** **http**: limit requests per connection (Artur K) [#40082](https://github.com/nodejs/node/pull/40082) + * The maximum number of requests a socket can handle before closing keep alive connection can be set with `server.maxRequestsPerSocket`. +* [[`9a672961fa`](https://github.com/nodejs/node/commit/9a672961fa)] - **(SEMVER-MINOR)** **src**: add --no-global-search-paths cli option (Cheng Zhao) [#39754](https://github.com/nodejs/node/pull/39754) + * Adds the `--no-global-search-paths` command-line option to not search modules from global paths like `$HOME/.node_modules` and `$NODE_PATH`. 
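The two notable changes just described, `server.maxRequestsPerSocket` from #40082 and the `--no-global-search-paths` flag from #39754, are easiest to read with a short sketch. The following is not from the patch itself; it is a minimal example assuming only the behaviour stated in the release notes and in the `doc/api/cli.md` and `doc/api/http.md` hunks earlier in this patch. The port number and handler are arbitrary.

```js
// Sketch only. Start the process with the new flag so modules are not
// resolved from $HOME/.node_modules or $NODE_PATH, e.g.:
//   node --no-global-search-paths server.js
const http = require('http');

const server = http.createServer((req, res) => {
  res.end('ok\n');
});

// Per the note above: once a keep-alive socket has served this many
// requests, the server closes the connection. The default is null (no limit).
server.maxRequestsPerSocket = 5;

server.listen(3000);
```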
+* [[`fe920b6cbf`](https://github.com/nodejs/node/commit/fe920b6cbf)] - **(SEMVER-MINOR)** **src**: make napi\_create\_reference accept symbol (JckXia) [#39926](https://github.com/nodejs/node/pull/39926) +* [[`97f3072ceb`](https://github.com/nodejs/node/commit/97f3072ceb)] - **(SEMVER-MINOR)** **stream**: add signal support to pipeline generators (Robert Nagy) [#39067](https://github.com/nodejs/node/pull/39067) + +### Commits + +* [[`b7dc651884`](https://github.com/nodejs/node/commit/b7dc651884)] - **build**: run modified internet tests on GitHub Actions (Rich Trott) [#40100](https://github.com/nodejs/node/pull/40100) +* [[`8d5787a043`](https://github.com/nodejs/node/commit/8d5787a043)] - **build**: add .mailmap/AUTHORS to paths-ignore for test-macos (Rich Trott) [#40109](https://github.com/nodejs/node/pull/40109) +* [[`9793e7ff08`](https://github.com/nodejs/node/commit/9793e7ff08)] - **build**: add .mailmap/AUTHORS to path-ignore for test-asan (Rich Trott) [#40109](https://github.com/nodejs/node/pull/40109) +* [[`886921de38`](https://github.com/nodejs/node/commit/886921de38)] - **build**: add paths-ignore for build-tarball workflow (Rich Trott) [#40109](https://github.com/nodejs/node/pull/40109) +* [[`01b1946b38`](https://github.com/nodejs/node/commit/01b1946b38)] - **build**: only lint version numbers for pull requests (Michaël Zasso) [#40027](https://github.com/nodejs/node/pull/40027) +* [[`c804d070a6`](https://github.com/nodejs/node/commit/c804d070a6)] - **build**: add daily/on-demand internet test workflow (Rich Trott) [#40086](https://github.com/nodejs/node/pull/40086) +* [[`7bddaecbf4`](https://github.com/nodejs/node/commit/7bddaecbf4)] - **build**: add YAML linting to GitHub Actions (Rich Trott) [#40007](https://github.com/nodejs/node/pull/40007) +* [[`5a20f9055c`](https://github.com/nodejs/node/commit/5a20f9055c)] - **build**: add YAML linting (Rich Trott) [#40007](https://github.com/nodejs/node/pull/40007) +* [[`0b30867c08`](https://github.com/nodejs/node/commit/0b30867c08)] - **build**: run AUTHORS update weekly (Rich Trott) [#40004](https://github.com/nodejs/node/pull/40004) +* [[`22a78a75ee`](https://github.com/nodejs/node/commit/22a78a75ee)] - **build**: preserves symbols during LTO with macOS linker (Jesse Chan) [#39839](https://github.com/nodejs/node/pull/39839) +* [[`f0dec58d43`](https://github.com/nodejs/node/commit/f0dec58d43)] - **crypto**: fix webcrypto ed(25519|448) spki/pkcs8 import (Filip Skokan) [#40131](https://github.com/nodejs/node/pull/40131) +* [[`d80082f3eb`](https://github.com/nodejs/node/commit/d80082f3eb)] - **crypto**: use `validateObject` (Voltrex) [#39872](https://github.com/nodejs/node/pull/39872) +* [[`d657ae6f8a`](https://github.com/nodejs/node/commit/d657ae6f8a)] - **crypto**: fix RSA-PSS default saltLength (Tobias Nießen) [#39999](https://github.com/nodejs/node/pull/39999) +* [[`fc45cbe7a8`](https://github.com/nodejs/node/commit/fc45cbe7a8)] - **crypto**: fix default MGF1 hash for OpenSSL 3 (Tobias Nießen) [#40031](https://github.com/nodejs/node/pull/40031) +* [[`105c9e6d3b`](https://github.com/nodejs/node/commit/105c9e6d3b)] - **crypto**: check webcrypto asymmetric key types during importKey (Filip Skokan) [#39962](https://github.com/nodejs/node/pull/39962) +* [[`fb226ff2ee`](https://github.com/nodejs/node/commit/fb226ff2ee)] - **(SEMVER-MINOR)** **crypto**: add rsa-pss keygen parameters (Filip Skokan) [#39927](https://github.com/nodejs/node/pull/39927) +* [[`85206b7311`](https://github.com/nodejs/node/commit/85206b7311)] - **deps**: upgrade npm 
to 7.24.0 (npm team) [#40167](https://github.com/nodejs/node/pull/40167) +* [[`06f6e01f37`](https://github.com/nodejs/node/commit/06f6e01f37)] - **deps**: add riscv64 into openssl Makefile and gen openssl-riscv64 (Lu Yahan) [#40063](https://github.com/nodejs/node/pull/40063) +* [[`9c76c69972`](https://github.com/nodejs/node/commit/9c76c69972)] - **deps**: patch V8 to 9.3.345.19 (Michaël Zasso) [#40108](https://github.com/nodejs/node/pull/40108) +* [[`0df47d5843`](https://github.com/nodejs/node/commit/0df47d5843)] - **deps**: upgrade npm to 7.23.0 (npm team) [#40055](https://github.com/nodejs/node/pull/40055) +* [[`b3843bf417`](https://github.com/nodejs/node/commit/b3843bf417)] - **deps**: patch v8 for vs2019 in std17 (Jiawen Geng) [#40060](https://github.com/nodejs/node/pull/40060) +* [[`67759585a0`](https://github.com/nodejs/node/commit/67759585a0)] - **deps**: patch for v8 on windows (Jiawen Geng) [#40010](https://github.com/nodejs/node/pull/40010) +* [[`98f56d179c`](https://github.com/nodejs/node/commit/98f56d179c)] - **deps**: update Acorn to v8.5.0 (Michaël Zasso) [#40015](https://github.com/nodejs/node/pull/40015) +* [[`5c6708582e`](https://github.com/nodejs/node/commit/5c6708582e)] - **dns**: cleanup validation (Voltrex) [#40061](https://github.com/nodejs/node/pull/40061) +* [[`e4825dcfd5`](https://github.com/nodejs/node/commit/e4825dcfd5)] - **doc**: changes default values for fs.read fns (RISHABH BUDHIRAJA) [#39163](https://github.com/nodejs/node/pull/39163) +* [[`0254b4b0d3`](https://github.com/nodejs/node/commit/0254b4b0d3)] - **doc**: fix markdown indentation in lists (Michaël Zasso) [#40142](https://github.com/nodejs/node/pull/40142) +* [[`b6939a3419`](https://github.com/nodejs/node/commit/b6939a3419)] - **doc**: prepare README.md for stricter linting (Rich Trott) [#40137](https://github.com/nodejs/node/pull/40137) +* [[`a07d8444f9`](https://github.com/nodejs/node/commit/a07d8444f9)] - **doc**: fix comma splice (Rich Trott) [#40133](https://github.com/nodejs/node/pull/40133) +* [[`2488bc0c4f`](https://github.com/nodejs/node/commit/2488bc0c4f)] - **doc**: clean up weird notes about reentrancy (Anna Henningsen) [#40107](https://github.com/nodejs/node/pull/40107) +* [[`8b80dcbc30`](https://github.com/nodejs/node/commit/8b80dcbc30)] - **doc**: correct parameters in fs and stream documentation (vipul kumar) [#39984](https://github.com/nodejs/node/pull/39984) +* [[`1ced732078`](https://github.com/nodejs/node/commit/1ced732078)] - **doc**: fix CJS-ESM selector in Safari (Bradley Farias) [#40135](https://github.com/nodejs/node/pull/40135) +* [[`7fdb12739d`](https://github.com/nodejs/node/commit/7fdb12739d)] - **doc**: add timeout.close (Nikita Galkin) [#40036](https://github.com/nodejs/node/pull/40036) +* [[`81cb14bb58`](https://github.com/nodejs/node/commit/81cb14bb58)] - **doc**: clarify that ObjectWrap requires manual cleanup on shutdown (Gerhard Stöbich) [#40074](https://github.com/nodejs/node/pull/40074) +* [[`8aad81dd99`](https://github.com/nodejs/node/commit/8aad81dd99)] - **doc**: add full list of subsystems (FrankQiu) [#39971](https://github.com/nodejs/node/pull/39971) +* [[`9655329772`](https://github.com/nodejs/node/commit/9655329772)] - **doc**: add Ayase-252 to collaborators (Qingyu Deng) [#40078](https://github.com/nodejs/node/pull/40078) +* [[`6d399e11e9`](https://github.com/nodejs/node/commit/6d399e11e9)] - **doc**: fix CCM cipher example in MJS (Tobias Nießen) [#39949](https://github.com/nodejs/node/pull/39949) +* 
[[`d426ee9b17`](https://github.com/nodejs/node/commit/d426ee9b17)] - **doc**: fix property name 'detail' of performanceEntry (Christian Boehlke) [#40019](https://github.com/nodejs/node/pull/40019) +* [[`846e7e880e`](https://github.com/nodejs/node/commit/846e7e880e)] - **doc**: fix list indentation in corepack.md (Alexey Ten) [#40029](https://github.com/nodejs/node/pull/40029) +* [[`b6dd2ea930`](https://github.com/nodejs/node/commit/b6dd2ea930)] - **doc**: fix missing history version in `fs.md` (Antoine du Hamel) [#39972](https://github.com/nodejs/node/pull/39972) +* [[`f666f5a8d1`](https://github.com/nodejs/node/commit/f666f5a8d1)] - **events**: fix duplicate require which cause performance penalty (wwwzbwcom) [#39892](https://github.com/nodejs/node/pull/39892) +* [[`59fff925be`](https://github.com/nodejs/node/commit/59fff925be)] - **(SEMVER-MINOR)** **fs**: make `open` and `close` stream override optional when unused (Antoine du Hamel) [#40013](https://github.com/nodejs/node/pull/40013) +* [[`a63a4bce90`](https://github.com/nodejs/node/commit/a63a4bce90)] - **(SEMVER-MINOR)** **http**: limit requests per connection (Artur K) [#40082](https://github.com/nodejs/node/pull/40082) +* [[`bc9c2ca6af`](https://github.com/nodejs/node/commit/bc9c2ca6af)] - **http**: remove CRLF variable (shfshanyue) [#40101](https://github.com/nodejs/node/pull/40101) +* [[`dd50b91f77`](https://github.com/nodejs/node/commit/dd50b91f77)] - **lib**: remove useless statement (Maledong) [#39983](https://github.com/nodejs/node/pull/39983) +* [[`608528028c`](https://github.com/nodejs/node/commit/608528028c)] - **lib**: avoid creating a throw away object in `validateObject` (Antoine du Hamel) [#39807](https://github.com/nodejs/node/pull/39807) +* [[`edcfffeaea`](https://github.com/nodejs/node/commit/edcfffeaea)] - **lib**: use standard property names (null) [#39981](https://github.com/nodejs/node/pull/39981) +* [[`640353af86`](https://github.com/nodejs/node/commit/640353af86)] - **lib,repl**: ignore non-canBeRequiredByUsers built-in (Khaidi Chu) [#39942](https://github.com/nodejs/node/pull/39942) +* [[`4444b5c938`](https://github.com/nodejs/node/commit/4444b5c938)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#40148](https://github.com/nodejs/node/pull/40148) +* [[`4993318862`](https://github.com/nodejs/node/commit/4993318862)] - **meta**: update GeoffreyBooth email addresses in AUTHORS and .mailmap (Rich Trott) [#40132](https://github.com/nodejs/node/pull/40132) +* [[`98d42fa1f4`](https://github.com/nodejs/node/commit/98d42fa1f4)] - **meta**: add mailmap entry for LPardue (Rich Trott) [#40129](https://github.com/nodejs/node/pull/40129) +* [[`effdfa91be`](https://github.com/nodejs/node/commit/effdfa91be)] - **meta**: update GeoffreyBooth email address (Geoffrey Booth) [#40102](https://github.com/nodejs/node/pull/40102) +* [[`588257c00a`](https://github.com/nodejs/node/commit/588257c00a)] - **meta**: add .mailmap entry for arcanis (Rich Trott) [#40103](https://github.com/nodejs/node/pull/40103) +* [[`7ee3fbd1e0`](https://github.com/nodejs/node/commit/7ee3fbd1e0)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#40087](https://github.com/nodejs/node/pull/40087) +* [[`2a41530a5e`](https://github.com/nodejs/node/commit/2a41530a5e)] - **meta**: consolidate AUTHORS entry for mikemaccana (Rich Trott) [#40051](https://github.com/nodejs/node/pull/40051) +* [[`a71579b05e`](https://github.com/nodejs/node/commit/a71579b05e)] - **meta**: add more mailmap entries for bajtos (Rich Trott) 
[#40023](https://github.com/nodejs/node/pull/40023) +* [[`29104f5e64`](https://github.com/nodejs/node/commit/29104f5e64)] - **meta**: consolidate AUTHORS entries for mithunsasidharan (Rich Trott) [#40003](https://github.com/nodejs/node/pull/40003) +* [[`381293f54a`](https://github.com/nodejs/node/commit/381293f54a)] - **meta**: update AUTHORS (Node.js GitHub Bot) [#39957](https://github.com/nodejs/node/pull/39957) +* [[`1eca9bc5b2`](https://github.com/nodejs/node/commit/1eca9bc5b2)] - **module**: support pattern trailers for imports field (Guy Bedford) [#40041](https://github.com/nodejs/node/pull/40041) +* [[`7376edca6d`](https://github.com/nodejs/node/commit/7376edca6d)] - **module**: deprecate trailing slash pattern mappings (Guy Bedford) [#40039](https://github.com/nodejs/node/pull/40039) +* [[`92f182b23d`](https://github.com/nodejs/node/commit/92f182b23d)] - **module**: fix $ pattern replacements (Guy Bedford) [#40044](https://github.com/nodejs/node/pull/40044) +* [[`d6124d8259`](https://github.com/nodejs/node/commit/d6124d8259)] - **repl**: fix top level await with surrogate characters (Mestery) [#39931](https://github.com/nodejs/node/pull/39931) +* [[`9a672961fa`](https://github.com/nodejs/node/commit/9a672961fa)] - **(SEMVER-MINOR)** **src**: add --no-global-search-paths cli option (Cheng Zhao) [#39754](https://github.com/nodejs/node/pull/39754) +* [[`51f9ad4897`](https://github.com/nodejs/node/commit/51f9ad4897)] - **(SEMVER-MINOR)** **src**: add option to disable global search paths (Cheng Zhao) [#39754](https://github.com/nodejs/node/pull/39754) +* [[`95528b284d`](https://github.com/nodejs/node/commit/95528b284d)] - **src**: remove unnecessary comment and add a CHECK in crypto\_tls.cc (Darshan Sen) [#39991](https://github.com/nodejs/node/pull/39991) +* [[`31994fbf8e`](https://github.com/nodejs/node/commit/31994fbf8e)] - **src**: register zlib external references for snapshot (Joyee Cheung) [#40050](https://github.com/nodejs/node/pull/40050) +* [[`cfcd57182b`](https://github.com/nodejs/node/commit/cfcd57182b)] - **src**: fix -Wunreachable-code-return error (Shelley Vohr) [#40034](https://github.com/nodejs/node/pull/40034) +* [[`9f3a015b60`](https://github.com/nodejs/node/commit/9f3a015b60)] - **src**: add option to disable loading native addons (Dominic Elm) [#39977](https://github.com/nodejs/node/pull/39977) +* [[`570bef1710`](https://github.com/nodejs/node/commit/570bef1710)] - ***Revert*** "**src**: skip test\_fatal/test\_threads for Debug builds" (Anna Henningsen) [#39954](https://github.com/nodejs/node/pull/39954) +* [[`842f936e04`](https://github.com/nodejs/node/commit/842f936e04)] - **src**: use Isolate::TryGetCurrent where appropriate (Anna Henningsen) [#39954](https://github.com/nodejs/node/pull/39954) +* [[`fe920b6cbf`](https://github.com/nodejs/node/commit/fe920b6cbf)] - **(SEMVER-MINOR)** **src**: make napi\_create\_reference accept symbol (JckXia) [#39926](https://github.com/nodejs/node/pull/39926) +* [[`73aa4e34ff`](https://github.com/nodejs/node/commit/73aa4e34ff)] - **src**: fix C4805 MSVC warning (Michaël Zasso) [#39998](https://github.com/nodejs/node/pull/39998) +* [[`826eee363c`](https://github.com/nodejs/node/commit/826eee363c)] - **src**: register external references of PipeWrap for snapshot (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`7a17cbfdea`](https://github.com/nodejs/node/commit/7a17cbfdea)] - **src**: register external references of TTYWrap for snapshot (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* 
[[`00cca48081`](https://github.com/nodejs/node/commit/00cca48081)] - **src**: register external references of TCPWrap for snapshot (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`6095fb07b6`](https://github.com/nodejs/node/commit/6095fb07b6)] - **src**: register external references of SignalWrap for snapshot (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`db75711c5c`](https://github.com/nodejs/node/commit/db75711c5c)] - **src**: register missing process methods external references (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`b4e074c295`](https://github.com/nodejs/node/commit/b4e074c295)] - **src**: register missing stream wrap external references (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`a2c1c3ef64`](https://github.com/nodejs/node/commit/a2c1c3ef64)] - **src**: register external references of BaseObject for snapshot (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`6fdf02523e`](https://github.com/nodejs/node/commit/6fdf02523e)] - **src**: register external references of node-report for snapshot (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`bef78a2f88`](https://github.com/nodejs/node/commit/bef78a2f88)] - **src**: register external references of dtrace for snapshot (Joyee Cheung) [#39961](https://github.com/nodejs/node/pull/39961) +* [[`97f3072ceb`](https://github.com/nodejs/node/commit/97f3072ceb)] - **(SEMVER-MINOR)** **stream**: add signal support to pipeline generators (Robert Nagy) [#39067](https://github.com/nodejs/node/pull/39067) +* [[`6be405bd7b`](https://github.com/nodejs/node/commit/6be405bd7b)] - **test**: fix test-dgram-udp6-link-local-address on Windows (Michaël Zasso) [#40005](https://github.com/nodejs/node/pull/40005) +* [[`ec94bec9a3`](https://github.com/nodejs/node/commit/ec94bec9a3)] - **test**: do not run `test-corepack-yarn-install` with no internet (Antoine du Hamel) [#40090](https://github.com/nodejs/node/pull/40090) +* [[`4aa2610252`](https://github.com/nodejs/node/commit/4aa2610252)] - **test**: update OpenSSL3 error messages for 3.0.0+quic (Daniel Bevenius) [#40093](https://github.com/nodejs/node/pull/40093) +* [[`4367a61a9b`](https://github.com/nodejs/node/commit/4367a61a9b)] - **test**: mark test-crypto-timing-safe-equal-benchmarks flaky (Richard Lau) [#40065](https://github.com/nodejs/node/pull/40065) +* [[`5b5e27281c`](https://github.com/nodejs/node/commit/5b5e27281c)] - **test**: fix internet/test-dns (Rich Trott) [#40083](https://github.com/nodejs/node/pull/40083) +* [[`67bbfeb7e1`](https://github.com/nodejs/node/commit/67bbfeb7e1)] - **test**: make tests pass on Windows with Unix EOL (Michaël Zasso) [#40002](https://github.com/nodejs/node/pull/40002) +* [[`a8c99d9f09`](https://github.com/nodejs/node/commit/a8c99d9f09)] - **tools**: update doc generator dependencies (Michaël Zasso) [#40042](https://github.com/nodejs/node/pull/40042) +* [[`ec6de1195a`](https://github.com/nodejs/node/commit/ec6de1195a)] - **tools**: update ansi-regex in lint-md rollup (Rich Trott) [#40112](https://github.com/nodejs/node/pull/40112) +* [[`d55804ca4e`](https://github.com/nodejs/node/commit/d55804ca4e)] - **tools**: update all dependencies of markdown linter (Michaël Zasso) [#40035](https://github.com/nodejs/node/pull/40035) +* [[`f03bae7c82`](https://github.com/nodejs/node/commit/f03bae7c82)] - **tools**: update remark-html to v13.0.2 (Michaël Zasso) [#40043](https://github.com/nodejs/node/pull/40043) +* 
[[`99af21292f`](https://github.com/nodejs/node/commit/99af21292f)] - **tools,build**: update YAML files in preparation for linting (Rich Trott) [#40007](https://github.com/nodejs/node/pull/40007) +* [[`590ace418d`](https://github.com/nodejs/node/commit/590ace418d)] - **tools,doc**: fix misrendering of consecutive JS blocks (Rich Trott) [#40146](https://github.com/nodejs/node/pull/40146) +* [[`5983568204`](https://github.com/nodejs/node/commit/5983568204)] - **worker**: avoid potential deadlock on NearHeapLimit (Santiago Gimeno) [#38403](https://github.com/nodejs/node/pull/38403) + ## 2021-09-10, Version 16.9.1 (Current), @richardlau diff --git a/src/node_version.h b/src/node_version.h index 8427c1b55f00d3..a572d9b9585373 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 16 -#define NODE_MINOR_VERSION 9 -#define NODE_PATCH_VERSION 2 +#define NODE_MINOR_VERSION 10 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)
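Relating to patch 93 above (`doc: changes default values for fs.read fns`): that patch removes the documented default values for the `buffer`, `offset` and `length` parameters of the `fs.read()` family. A minimal sketch of the options form those hunks document, assuming only that the options object accepts `buffer`, `offset`, `length` and `position` as described there; the file name and buffer size are arbitrary and the snippet is not taken from the patch.

```js
const fs = require('fs');

fs.open('example.bin', 'r', (err, fd) => {
  if (err) throw err;
  // Only `buffer` is supplied; `offset`, `length` and `position` are left
  // to the runtime's defaults, whose documentation the patch adjusts.
  fs.read(fd, { buffer: Buffer.alloc(1024) }, (err, bytesRead, buffer) => {
    if (err) throw err;
    console.log(`read ${bytesRead} bytes into a ${buffer.length}-byte buffer`);
    fs.close(fd, (err) => {
      if (err) throw err;
    });
  });
});
```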