diff --git a/.mailmap b/.mailmap index 42c32d0ab4419..a14e7b8287bc8 100644 --- a/.mailmap +++ b/.mailmap @@ -28,7 +28,8 @@ Geoff Flarity Gregers Gram Rygg Ifeanyi Oraelosi Isaac Z. Schlueter -Isaac Z. Schlueter isaacs +Isaac Z. Schlueter +isaacs Jake Verbaten James Sanders James Treworgy diff --git a/CHANGELOG.md b/CHANGELOG.md index 58b2f49bd9dad..c8e6de1ae73ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,48 @@ +## v6.10.1 (2019-07-11): + +### BUGFIXES + +* [`3cbd57712`](https://github.com/npm/cli/commit/3cbd577120a9da6e51bb8b13534d1bf71ea5712c) + fix(git): strip GIT environs when running git + ([@isaacs](https://github.com/isaacs)) +* [`a81a8c4c4`](https://github.com/npm/cli/commit/a81a8c4c466f510215a51cef1bb08544d11844fe) + [#206](https://github.com/npm/cli/pull/206) improve isOnly(Dev,Optional) + ([@larsgw](https://github.com/larsgw)) +* [`172f9aca6`](https://github.com/npm/cli/commit/172f9aca67a66ee303c17f90a994cd52fc66552a) + [#179](https://github.com/npm/cli/pull/179) fix-xmas-underline + ([@raywu0123](https://github.com/raywu0123)) + +### DEPENDENCIES + +* [`429226a5e`](https://github.com/npm/cli/commit/429226a5e992cd907d4f19bd738037007cf78c1f) + `lru-cache@5.1.1` ([@isaacs](https://github.com/isaacs)) +* [`175670ea6`](https://github.com/npm/cli/commit/175670ea65cca03f8b2e957df3dd4b8b0efd0e1f) + `npm-registry-fetch@3.9.1`: ([@isaacs](https://github.com/isaacs)) +* [`0d0517f7f`](https://github.com/npm/cli/commit/0d0517f7f8c902b5064ac18fb4015b31750ad2b2) + `call-limit@1.1.1` ([@isaacs](https://github.com/isaacs)) +* [`741400429`](https://github.com/npm/cli/commit/74140042917ea241062a812ceb65c5423c2bafa9) + `glob@7.1.4` ([@isaacs](https://github.com/isaacs)) +* [`bddd60e30`](https://github.com/npm/cli/commit/bddd60e302283a4a70d35f8f742e42bd13f4dabf) + `inherits@2.0.4` ([@isaacs](https://github.com/isaacs)) +* [`4acf03fd1`](https://github.com/npm/cli/commit/4acf03fd140ed3ddb2dcf3fdc9756bc3f5a8bcbb) + `libnpmsearch@2.0.1` ([@isaacs](https://github.com/isaacs)) +* [`c2bd17291`](https://github.com/npm/cli/commit/c2bd17291a86bea7ced2fbd07d66d013bd7a7560) + `marked@0.6.3` ([@isaacs](https://github.com/isaacs)) +* [`7f0221bb1`](https://github.com/npm/cli/commit/7f0221bb1bb41ffc933c785940e227feae38c80c) + `marked-man@0.6.0` ([@isaacs](https://github.com/isaacs)) +* [`f458fe7dd`](https://github.com/npm/cli/commit/f458fe7dd3bebddf603aaae183a424ea8aaa018f) + `npm-lifecycle@2.1.1` ([@isaacs](https://github.com/isaacs)) +* [`009752978`](https://github.com/npm/cli/commit/0097529780269c28444f1efa0d7c131d47a933eb) + `node-gyp@4.0.0` ([@isaacs](https://github.com/isaacs)) +* [`0fa2bb438`](https://github.com/npm/cli/commit/0fa2bb4386379d6e9d8c95db08662ec0529964f9) + `query-string@6.8.1` ([@isaacs](https://github.com/isaacs)) +* [`b86450929`](https://github.com/npm/cli/commit/b86450929796950a1fe4b1f9b02b1634c812f3bb) + `tar-stream@2.1.0` ([@isaacs](https://github.com/isaacs)) +* [`25db00fe9`](https://github.com/npm/cli/commit/25db00fe953453198adb9e1bd71d1bc2a9f04aaa) + `worker-farm@1.7.0` ([@isaacs](https://github.com/isaacs)) +* [`8dfbe8610`](https://github.com/npm/cli/commit/8dfbe861085dfa8fa56bb504b4a00fba04c34f9d) + `readable-stream@3.4.0` ([@isaacs](https://github.com/isaacs)) + ## v6.10.0 (2019-07-03): ### FEATURES diff --git a/lib/fetch-package-metadata.js b/lib/fetch-package-metadata.js index 78eed42bdf000..c4f46f513fed2 100644 --- a/lib/fetch-package-metadata.js +++ b/lib/fetch-package-metadata.js @@ -26,7 +26,8 @@ function andLogAndFinish (spec, tracker, done) { } } -const CACHE = 
require('lru-cache')({ +const LRUCache = require('lru-cache') +const CACHE = new LRUCache({ max: 300 * 1024 * 1024, length: (p) => p._contentLength }) diff --git a/lib/install/is-only-dev.js b/lib/install/is-only-dev.js index 2877c61a227d0..804497393eaa3 100644 --- a/lib/install/is-only-dev.js +++ b/lib/install/is-only-dev.js @@ -28,9 +28,10 @@ function andIsOnlyDev (name, seen) { return isDev && !isProd } else { if (seen.has(req)) return true - seen = new Set(seen) seen.add(req) - return isOnlyDev(req, seen) + const result = isOnlyDev(req, seen) + seen.delete(req) + return result } } } diff --git a/lib/install/is-only-optional.js b/lib/install/is-only-optional.js index 81e227bae7a89..ea27eadcfa809 100644 --- a/lib/install/is-only-optional.js +++ b/lib/install/is-only-optional.js @@ -11,11 +11,12 @@ function isOptional (node, seen) { if (seen.has(node) || node.requiredBy.length === 0) { return false } - seen = new Set(seen) seen.add(node) const swOptional = node.fromShrinkwrap && node.package._optional - return node.requiredBy.every(function (req) { + const result = node.requiredBy.every(function (req) { if (req.fakeChild && swOptional) return true return isOptDep(req, moduleName(node)) || isOptional(req, seen) }) + seen.delete(node) + return result } diff --git a/lib/utils/git.js b/lib/utils/git.js index 6770853dd9622..1951640e81568 100644 --- a/lib/utils/git.js +++ b/lib/utils/git.js @@ -28,6 +28,16 @@ function execGit (args, options, cb) { function spawnGit (args, options) { log.info('git', args) + // If we're already in a git command (eg, running test as an exec + // line in an interactive rebase) then these environment variables + // will force git to operate on the current project, instead of + // checking out/fetching/etc. whatever the user actually intends. + options.env = options.env || Object.keys(process.env) + .filter(k => !/^GIT/.test(k)) + .reduce((set, k) => { + set[k] = process.env[k] + return set + }, {}) return spawn(git, prefixGitArgs().concat(args || []), options) } diff --git a/lib/xmas.js b/lib/xmas.js index 65c0c131abd48..81ab59c8e2236 100644 --- a/lib/xmas.js +++ b/lib/xmas.js @@ -11,7 +11,7 @@ module.exports = function (args, cb) { '\u0020', '\u0020', '\u0020', '\u0020', '\u0020', '\u0020', '\u0020', '\u2E1B', '\u2042', '\u2E2E', '&', '@', '\uFF61' ] - var oc = [21, 33, 34, 35, 36, 37] + var oc = [33, 34, 35, 36, 37] var l = '\u005e' function w (s) { process.stderr.write(s) } @@ -25,6 +25,7 @@ module.exports = function (args, cb) { var O = L * 2 - 2 var S = (M - O) / 2 for (i = 0; i < S; i++) w(' ') + w(x + '\u001b[21m') w(x + '\u001b[32m' + f) for (i = 0; i < O; i++) { w( @@ -33,6 +34,7 @@ module.exports = function (args, cb) { ) } w(x + '\u001b[32m' + b + '\n') + w(x + '\u001b[0m') } w(' ') for (i = 1; i < H; i++) w('\u001b[32m' + l) diff --git a/node_modules/block-stream/LICENCE b/node_modules/block-stream/LICENCE deleted file mode 100644 index 74489e2e2658e..0000000000000 --- a/node_modules/block-stream/LICENCE +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) Isaac Z. Schlueter -All rights reserved. - -The BSD License - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. 
Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS -``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS -BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/block-stream/README.md b/node_modules/block-stream/README.md deleted file mode 100644 index c16e9c468891d..0000000000000 --- a/node_modules/block-stream/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# block-stream - -A stream of blocks. - -Write data into it, and it'll output data in buffer blocks the size you -specify, padding with zeroes if necessary. - -```javascript -var block = new BlockStream(512) -fs.createReadStream("some-file").pipe(block) -block.pipe(fs.createWriteStream("block-file")) -``` - -When `.end()` or `.flush()` is called, it'll pad the block with zeroes. diff --git a/node_modules/block-stream/block-stream.js b/node_modules/block-stream/block-stream.js deleted file mode 100644 index 008de035c2edc..0000000000000 --- a/node_modules/block-stream/block-stream.js +++ /dev/null @@ -1,209 +0,0 @@ -// write data to it, and it'll emit data in 512 byte blocks. -// if you .end() or .flush(), it'll emit whatever it's got, -// padded with nulls to 512 bytes. - -module.exports = BlockStream - -var Stream = require("stream").Stream - , inherits = require("inherits") - , assert = require("assert").ok - , debug = process.env.DEBUG ? 
console.error : function () {} - -function BlockStream (size, opt) { - this.writable = this.readable = true - this._opt = opt || {} - this._chunkSize = size || 512 - this._offset = 0 - this._buffer = [] - this._bufferLength = 0 - if (this._opt.nopad) this._zeroes = false - else { - this._zeroes = new Buffer(this._chunkSize) - for (var i = 0; i < this._chunkSize; i ++) { - this._zeroes[i] = 0 - } - } -} - -inherits(BlockStream, Stream) - -BlockStream.prototype.write = function (c) { - // debug(" BS write", c) - if (this._ended) throw new Error("BlockStream: write after end") - if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "") - if (c.length) { - this._buffer.push(c) - this._bufferLength += c.length - } - // debug("pushed onto buffer", this._bufferLength) - if (this._bufferLength >= this._chunkSize) { - if (this._paused) { - // debug(" BS paused, return false, need drain") - this._needDrain = true - return false - } - this._emitChunk() - } - return true -} - -BlockStream.prototype.pause = function () { - // debug(" BS pausing") - this._paused = true -} - -BlockStream.prototype.resume = function () { - // debug(" BS resume") - this._paused = false - return this._emitChunk() -} - -BlockStream.prototype.end = function (chunk) { - // debug("end", chunk) - if (typeof chunk === "function") cb = chunk, chunk = null - if (chunk) this.write(chunk) - this._ended = true - this.flush() -} - -BlockStream.prototype.flush = function () { - this._emitChunk(true) -} - -BlockStream.prototype._emitChunk = function (flush) { - // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused) - - // emit a chunk - if (flush && this._zeroes) { - // debug(" BS push zeroes", this._bufferLength) - // push a chunk of zeroes - var padBytes = (this._bufferLength % this._chunkSize) - if (padBytes !== 0) padBytes = this._chunkSize - padBytes - if (padBytes > 0) { - // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes)) - this._buffer.push(this._zeroes.slice(0, padBytes)) - this._bufferLength += padBytes - // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength) - } - } - - if (this._emitting || this._paused) return - this._emitting = true - - // debug(" BS entering loops") - var bufferIndex = 0 - while (this._bufferLength >= this._chunkSize && - (flush || !this._paused)) { - // debug(" BS data emission loop", this._bufferLength) - - var out - , outOffset = 0 - , outHas = this._chunkSize - - while (outHas > 0 && (flush || !this._paused) ) { - // debug(" BS data inner emit loop", this._bufferLength) - var cur = this._buffer[bufferIndex] - , curHas = cur.length - this._offset - // debug("cur=", cur) - // debug("curHas=%j", curHas) - // If it's not big enough to fill the whole thing, then we'll need - // to copy multiple buffers into one. However, if it is big enough, - // then just slice out the part we want, to save unnecessary copying. - // Also, need to copy if we've already done some copying, since buffers - // can't be joined like cons strings. - if (out || curHas < outHas) { - out = out || new Buffer(this._chunkSize) - cur.copy(out, outOffset, - this._offset, this._offset + Math.min(curHas, outHas)) - } else if (cur.length === outHas && this._offset === 0) { - // shortcut -- cur is exactly long enough, and no offset. - out = cur - } else { - // slice out the piece of cur that we need. 
- out = cur.slice(this._offset, this._offset + outHas) - } - - if (curHas > outHas) { - // means that the current buffer couldn't be completely output - // update this._offset to reflect how much WAS written - this._offset += outHas - outHas = 0 - } else { - // output the entire current chunk. - // toss it away - outHas -= curHas - outOffset += curHas - bufferIndex ++ - this._offset = 0 - } - } - - this._bufferLength -= this._chunkSize - assert(out.length === this._chunkSize) - // debug("emitting data", out) - // debug(" BS emitting, paused=%j", this._paused, this._bufferLength) - this.emit("data", out) - out = null - } - // debug(" BS out of loops", this._bufferLength) - - // whatever is left, it's not enough to fill up a block, or we're paused - this._buffer = this._buffer.slice(bufferIndex) - if (this._paused) { - // debug(" BS paused, leaving", this._bufferLength) - this._needsDrain = true - this._emitting = false - return - } - - // if flushing, and not using null-padding, then need to emit the last - // chunk(s) sitting in the queue. We know that it's not enough to - // fill up a whole block, because otherwise it would have been emitted - // above, but there may be some offset. - var l = this._buffer.length - if (flush && !this._zeroes && l) { - if (l === 1) { - if (this._offset) { - this.emit("data", this._buffer[0].slice(this._offset)) - } else { - this.emit("data", this._buffer[0]) - } - } else { - var outHas = this._bufferLength - , out = new Buffer(outHas) - , outOffset = 0 - for (var i = 0; i < l; i ++) { - var cur = this._buffer[i] - , curHas = cur.length - this._offset - cur.copy(out, outOffset, this._offset) - this._offset = 0 - outOffset += curHas - this._bufferLength -= curHas - } - this.emit("data", out) - } - // truncate - this._buffer.length = 0 - this._bufferLength = 0 - this._offset = 0 - } - - // now either drained or ended - // debug("either draining, or ended", this._bufferLength, this._ended) - // means that we've flushed out all that we can so far. - if (this._needDrain) { - // debug("emitting drain", this._bufferLength) - this._needDrain = false - this.emit("drain") - } - - if ((this._bufferLength === 0) && this._ended && !this._endEmitted) { - // debug("emitting end", this._bufferLength) - this._endEmitted = true - this.emit("end") - } - - this._emitting = false - - // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize) -} diff --git a/node_modules/block-stream/package.json b/node_modules/block-stream/package.json deleted file mode 100644 index d7d591cf904b0..0000000000000 --- a/node_modules/block-stream/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "_from": "block-stream@*", - "_id": "block-stream@0.0.9", - "_inBundle": false, - "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=", - "_location": "/block-stream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "block-stream@*", - "name": "block-stream", - "escapedName": "block-stream", - "rawSpec": "*", - "saveSpec": null, - "fetchSpec": "*" - }, - "_requiredBy": [ - "/node-gyp/tar" - ], - "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz", - "_shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a", - "_spec": "block-stream@*", - "_where": "/Users/rebecca/code/npm/node_modules/node-gyp/node_modules/tar", - "author": { - "name": "Isaac Z. 
Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/block-stream/issues" - }, - "bundleDependencies": false, - "dependencies": { - "inherits": "~2.0.0" - }, - "deprecated": false, - "description": "a stream of blocks", - "devDependencies": { - "tap": "^5.7.1" - }, - "engines": { - "node": "0.4 || >=0.5.8" - }, - "files": [ - "block-stream.js" - ], - "homepage": "https://github.com/isaacs/block-stream#readme", - "license": "ISC", - "main": "block-stream.js", - "name": "block-stream", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/block-stream.git" - }, - "scripts": { - "test": "tap test/*.js --cov" - }, - "version": "0.0.9" -} diff --git a/node_modules/cacache/node_modules/lru-cache/index.js b/node_modules/cacache/node_modules/lru-cache/index.js deleted file mode 100644 index 573b6b85b9779..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/index.js +++ /dev/null @@ -1,334 +0,0 @@ -'use strict' - -// A linked list to keep track of recently-used-ness -const Yallist = require('yallist') - -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') - -const naiveLength = () => 1 - -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. -// -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. -class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } - - if (!options) - options = {} - - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() - } - - // resize the cache when the max changes. - set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') - - this[MAX] = mL || Infinity - trim(this) - } - get max () { - return this[MAX] - } - - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale - } - get allowStale () { - return this[ALLOW_STALE] - } - - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') - - this[MAX_AGE] = mA - trim(this) - } - get maxAge () { - return this[MAX_AGE] - } - - // resize the cache when the lengthCalculator changes. 
- set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength - - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) - } - trim(this) - } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } - - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev - } - } - - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next - } - } - - keys () { - return this[LRU_LIST].toArray().map(k => k.key) - } - - values () { - return this[LRU_LIST].toArray().map(k => k.value) - } - - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) - } - - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list - } - - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) - } - - dumpLru () { - return this[LRU_LIST] - } - - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] - - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') - - const now = maxAge ? Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) - - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } - - const node = this[CACHE].get(key) - const item = node.value - - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) - } - - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true - } - - const hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. 
- if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) - - return false - } - - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true - } - - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) - } - - get (key) { - return get(this, key, true) - } - - peek (key) { - return get(this, key, false) - } - - pop () { - const node = this[LRU_LIST].tail - if (!node) - return null - - del(this, node) - return node.value - } - - del (key) { - del(this, this[CACHE].get(key)) - } - - load (arr) { - // reset the cache - this.reset() - - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) - } - } - } - } - - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) - } -} - -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) - } - } - return hit.value - } -} - -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false - - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} - -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. 
- const prev = walker.prev - del(self, walker) - walker = prev - } - } -} - -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) - - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) - } -} - -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 - } -} - -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined - } - if (hit) - fn.call(thisp, hit.value, hit.key, self) -} - -module.exports = LRUCache diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/cacache/node_modules/lru-cache/package.json deleted file mode 100644 index 1d41a07afbda4..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "_from": "lru-cache@^5.1.1", - "_id": "lru-cache@5.1.1", - "_inBundle": false, - "_integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "_location": "/cacache/lru-cache", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "lru-cache@^5.1.1", - "name": "lru-cache", - "escapedName": "lru-cache", - "rawSpec": "^5.1.1", - "saveSpec": null, - "fetchSpec": "^5.1.1" - }, - "_requiredBy": [ - "/cacache" - ], - "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "_shasum": "1da27e6710271947695daf6848e847f01d84b920", - "_spec": "lru-cache@^5.1.1", - "_where": "/Users/aeschright/code/cli/node_modules/cacache", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me" - }, - "bugs": { - "url": "https://github.com/isaacs/node-lru-cache/issues" - }, - "bundleDependencies": false, - "dependencies": { - "yallist": "^3.0.2" - }, - "deprecated": false, - "description": "A cache object that deletes the least-recently-used items.", - "devDependencies": { - "benchmark": "^2.1.4", - "tap": "^12.1.0" - }, - "files": [ - "index.js" - ], - "homepage": "https://github.com/isaacs/node-lru-cache#readme", - "keywords": [ - "mru", - "lru", - "cache" - ], - "license": "ISC", - "main": "index.js", - "name": "lru-cache", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-lru-cache.git" - }, - "scripts": { - "coveragerport": "tap --coverage-report=html", - "postpublish": "git push origin --all; git push origin --tags", - "postversion": "npm publish", - "preversion": "npm test", - "snap": "TAP_SNAPSHOT=1 tap test/*.js -J", - "test": "tap test/*.js --100 -J" - }, - "version": "5.1.1" -} diff --git a/node_modules/cacache/node_modules/yallist/LICENSE b/node_modules/cacache/node_modules/yallist/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/cacache/node_modules/yallist/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cacache/node_modules/yallist/iterator.js b/node_modules/cacache/node_modules/yallist/iterator.js deleted file mode 100644 index d41c97a19f984..0000000000000 --- a/node_modules/cacache/node_modules/yallist/iterator.js +++ /dev/null @@ -1,8 +0,0 @@ -'use strict' -module.exports = function (Yallist) { - Yallist.prototype[Symbol.iterator] = function* () { - for (let walker = this.head; walker; walker = walker.next) { - yield walker.value - } - } -} diff --git a/node_modules/cacache/node_modules/yallist/yallist.js b/node_modules/cacache/node_modules/yallist/yallist.js deleted file mode 100644 index b0ab36cf31b7a..0000000000000 --- a/node_modules/cacache/node_modules/yallist/yallist.js +++ /dev/null @@ -1,376 +0,0 @@ -'use strict' -module.exports = Yallist - -Yallist.Node = Node -Yallist.create = Yallist - -function Yallist (list) { - var self = this - if (!(self instanceof Yallist)) { - self = new Yallist() - } - - self.tail = null - self.head = null - self.length = 0 - - if (list && typeof list.forEach === 'function') { - list.forEach(function (item) { - self.push(item) - }) - } else if (arguments.length > 0) { - for (var i = 0, l = arguments.length; i < l; i++) { - self.push(arguments[i]) - } - } - - return self -} - -Yallist.prototype.removeNode = function (node) { - if (node.list !== this) { - throw new Error('removing node which does not belong to this list') - } - - var next = node.next - var prev = node.prev - - if (next) { - next.prev = prev - } - - if (prev) { - prev.next = next - } - - if (node === this.head) { - this.head = next - } - if (node === this.tail) { - this.tail = prev - } - - node.list.length-- - node.next = null - node.prev = null - node.list = null -} - -Yallist.prototype.unshiftNode = function (node) { - if (node === this.head) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var head = this.head - node.list = this - node.next = head - if (head) { - head.prev = node - } - - this.head = node - if (!this.tail) { - this.tail = node - } - this.length++ -} - -Yallist.prototype.pushNode = function (node) { - if (node === this.tail) { - return - } - - if (node.list) { - node.list.removeNode(node) - } - - var tail = this.tail - node.list = this - node.prev = tail - if (tail) { - tail.next = node - } - - this.tail = node - if (!this.head) { - this.head = node - } - this.length++ -} - -Yallist.prototype.push = function () { - for (var i = 0, l = arguments.length; i < l; i++) { - push(this, arguments[i]) - } - return this.length -} - -Yallist.prototype.unshift = function () { - for (var i = 0, l = arguments.length; i < l; i++) { - unshift(this, arguments[i]) - } - return this.length -} - -Yallist.prototype.pop = function () { - if (!this.tail) { - return undefined - } - - var res = this.tail.value - this.tail = this.tail.prev - if (this.tail) { - this.tail.next = null - } else { - this.head = null - } - this.length-- - return res -} - -Yallist.prototype.shift = function () { - if (!this.head) { - return undefined - } - - var res = this.head.value - this.head = this.head.next - if (this.head) { - this.head.prev = null - } else { - this.tail = null - } - this.length-- - return res -} - -Yallist.prototype.forEach = 
function (fn, thisp) { - thisp = thisp || this - for (var walker = this.head, i = 0; walker !== null; i++) { - fn.call(thisp, walker.value, i, this) - walker = walker.next - } -} - -Yallist.prototype.forEachReverse = function (fn, thisp) { - thisp = thisp || this - for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { - fn.call(thisp, walker.value, i, this) - walker = walker.prev - } -} - -Yallist.prototype.get = function (n) { - for (var i = 0, walker = this.head; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.next - } - if (i === n && walker !== null) { - return walker.value - } -} - -Yallist.prototype.getReverse = function (n) { - for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { - // abort out of the list early if we hit a cycle - walker = walker.prev - } - if (i === n && walker !== null) { - return walker.value - } -} - -Yallist.prototype.map = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.head; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.next - } - return res -} - -Yallist.prototype.mapReverse = function (fn, thisp) { - thisp = thisp || this - var res = new Yallist() - for (var walker = this.tail; walker !== null;) { - res.push(fn.call(thisp, walker.value, this)) - walker = walker.prev - } - return res -} - -Yallist.prototype.reduce = function (fn, initial) { - var acc - var walker = this.head - if (arguments.length > 1) { - acc = initial - } else if (this.head) { - walker = this.head.next - acc = this.head.value - } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = 0; walker !== null; i++) { - acc = fn(acc, walker.value, i) - walker = walker.next - } - - return acc -} - -Yallist.prototype.reduceReverse = function (fn, initial) { - var acc - var walker = this.tail - if (arguments.length > 1) { - acc = initial - } else if (this.tail) { - walker = this.tail.prev - acc = this.tail.value - } else { - throw new TypeError('Reduce of empty list with no initial value') - } - - for (var i = this.length - 1; walker !== null; i--) { - acc = fn(acc, walker.value, i) - walker = walker.prev - } - - return acc -} - -Yallist.prototype.toArray = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.head; walker !== null; i++) { - arr[i] = walker.value - walker = walker.next - } - return arr -} - -Yallist.prototype.toArrayReverse = function () { - var arr = new Array(this.length) - for (var i = 0, walker = this.tail; walker !== null; i++) { - arr[i] = walker.value - walker = walker.prev - } - return arr -} - -Yallist.prototype.slice = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > this.length) { - to = this.length - } - for (var i = 0, walker = this.head; walker !== null && i < from; i++) { - walker = walker.next - } - for (; walker !== null && i < to; i++, walker = walker.next) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.sliceReverse = function (from, to) { - to = to || this.length - if (to < 0) { - to += this.length - } - from = from || 0 - if (from < 0) { - from += this.length - } - var ret = new Yallist() - if (to < from || to < 0) { - return ret - } - if (from < 0) { - from = 0 - } - if (to > 
this.length) { - to = this.length - } - for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { - walker = walker.prev - } - for (; walker !== null && i > from; i--, walker = walker.prev) { - ret.push(walker.value) - } - return ret -} - -Yallist.prototype.reverse = function () { - var head = this.head - var tail = this.tail - for (var walker = head; walker !== null; walker = walker.prev) { - var p = walker.prev - walker.prev = walker.next - walker.next = p - } - this.head = tail - this.tail = head - return this -} - -function push (self, item) { - self.tail = new Node(item, self.tail, null, self) - if (!self.head) { - self.head = self.tail - } - self.length++ -} - -function unshift (self, item) { - self.head = new Node(item, null, self.head, self) - if (!self.tail) { - self.tail = self.head - } - self.length++ -} - -function Node (value, prev, next, list) { - if (!(this instanceof Node)) { - return new Node(value, prev, next, list) - } - - this.list = list - this.value = value - - if (prev) { - prev.next = this - this.prev = prev - } else { - this.prev = null - } - - if (next) { - next.prev = this - this.next = next - } else { - this.next = null - } -} - -try { - // add if support for Symbol.iterator is present - require('./iterator.js')(Yallist) -} catch (er) {} diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index c5cce2b1060c7..2e052a47b775d 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,5 +1,5 @@ { - "_from": "cacache@^11.3.2", + "_from": "cacache@^11.3.3", "_id": "cacache@11.3.3", "_inBundle": false, "_integrity": "sha512-p8WcneCytvzPxhDvYp31PD039vi77I12W+/KfR9S8AZbaiARFBCpsPJS+9uhWfeBfeAtW7o/4vt3MUqLkbY6nA==", @@ -15,12 +15,12 @@ "_requested": { "type": "range", "registry": true, - "raw": "cacache@^11.3.2", + "raw": "cacache@^11.3.3", "name": "cacache", "escapedName": "cacache", - "rawSpec": "^11.3.2", + "rawSpec": "^11.3.3", "saveSpec": null, - "fetchSpec": "^11.3.2" + "fetchSpec": "^11.3.3" }, "_requiredBy": [ "#USER", @@ -30,7 +30,7 @@ ], "_resolved": "https://registry.npmjs.org/cacache/-/cacache-11.3.3.tgz", "_shasum": "8bd29df8c6a718a6ebd2d010da4d7972ae3bbadc", - "_spec": "cacache@^11.3.2", + "_spec": "cacache@^11.3.3", "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Kat Marchán", diff --git a/node_modules/call-limit/CHANGELOG.md b/node_modules/call-limit/CHANGELOG.md new file mode 100644 index 0000000000000..a6b1df978e11d --- /dev/null +++ b/node_modules/call-limit/CHANGELOG.md @@ -0,0 +1,16 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +### [1.1.1](https://github.com/iarna/call-limit/compare/v1.1.0...v1.1.1) (2019-06-03) + + +### Bug Fixes + +* **call-limit:** Limiter rewrite ([bbd0ea6](https://github.com/iarna/call-limit/commit/bbd0ea6)) +* **test:** Update tests to let/const ([f0b7f98](https://github.com/iarna/call-limit/commit/f0b7f98)) + + +### Tests + +* Test coverage for limited promise based methods ([d5069f4](https://github.com/iarna/call-limit/commit/d5069f4)) diff --git a/node_modules/node-gyp/node_modules/tar/LICENSE b/node_modules/call-limit/LICENSE similarity index 82% rename from node_modules/node-gyp/node_modules/tar/LICENSE rename to node_modules/call-limit/LICENSE index 019b7e40ea056..216e843abcbac 100644 --- a/node_modules/node-gyp/node_modules/tar/LICENSE +++ b/node_modules/call-limit/LICENSE @@ -1,12 +1,13 @@ -The ISC License -Copyright (c) Isaac Z. Schlueter and Contributors +Copyright Rebecca Turner + Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/call-limit/README.md b/node_modules/call-limit/README.md index 590ca419ee6cf..62086b68496f3 100644 --- a/node_modules/call-limit/README.md +++ b/node_modules/call-limit/README.md @@ -4,17 +4,17 @@ call-limit Limit the number of simultaneous executions of a async function. 
```javascript -var fs = require('fs') -var limit = require('call-limit') -var limitedStat = limit(fs.stat, 5) +const fs = require('fs') +const limit = require('call-limit') +const limitedStat = limit(fs.stat, 5) ``` Or with promise returning functions: ```javascript -var fs = Bluebird.promisifyAll(require('fs')) -var limit = require('call-limit') -var limitedStat = limit.promise(fs.statAsync, 5) +const fs = Bluebird.promisifyAll(require('fs')) +const limit = require('call-limit') +const limitedStat = limit.promise(fs.statAsync, 5) ``` ### USAGE: @@ -22,7 +22,7 @@ var limitedStat = limit.promise(fs.statAsync, 5) Given that: ```javascript -var limit = require('call-limit') +const limit = require('call-limit') ``` ### limit(func, maxRunning) → limitedFunc diff --git a/node_modules/call-limit/call-limit.js b/node_modules/call-limit/call-limit.js index d6e4534b8a2e8..2dc7d279a3a52 100644 --- a/node_modules/call-limit/call-limit.js +++ b/node_modules/call-limit/call-limit.js @@ -1,86 +1,99 @@ -"use strict" +'use strict' -var defaultMaxRunning = 50 +const defaultMaxRunning = 50 -var limit = module.exports = function (func, maxRunning) { - var running = 0 - var queue = [] +const limit = module.exports = function (func, maxRunning) { + const state = {running: 0, queue: []} if (!maxRunning) maxRunning = defaultMaxRunning return function limited () { - var self = this - var args = Array.prototype.slice.call(arguments) - if (running >= maxRunning) { - queue.push({self: this, args: args}) - return + const args = Array.prototype.slice.call(arguments) + if (state.running >= maxRunning) { + state.queue.push({obj: this, args}) + } else { + callFunc(this, args) + } + } + function callNext () { + if (state.queue.length === 0) return + const next = state.queue.shift() + callFunc(next.obj, next.args) + } + function callFunc (obj, args) { + const cb = typeof args[args.length - 1] === 'function' && args.pop() + try { + ++state.running + func.apply(obj, args.concat(function () { + --state.running + process.nextTick(callNext) + if (cb) process.nextTick(() => cb.apply(obj, arguments)) + })) + } catch (err) { + --state.running + if (cb) process.nextTick(() => cb.call(obj, err)) + process.nextTick(callNext) } - var cb = typeof args[args.length-1] === 'function' && args.pop() - ++ running - args.push(function () { - var cbargs = arguments - -- running - cb && process.nextTick(function () { - cb.apply(self, cbargs) - }) - if (queue.length) { - var next = queue.shift() - limited.apply(next.self, next.args) - } - }) - func.apply(self, args) } } module.exports.method = function (classOrObj, method, maxRunning) { if (typeof classOrObj === 'function') { - var func = classOrObj.prototype[method] + const func = classOrObj.prototype[method] classOrObj.prototype[method] = limit(func, maxRunning) } else { - var func = classOrObj[method] + const func = classOrObj[method] classOrObj[method] = limit(func, maxRunning) } } module.exports.promise = function (func, maxRunning) { - var running = 0 - var queue = [] + const state = {running: 0, queue: []} if (!maxRunning) maxRunning = defaultMaxRunning - return function () { - var self = this - var args = Array.prototype.slice.call(arguments) - return new Promise(function (resolve) { - if (running >= maxRunning) { - queue.push({self: self, args: args, resolve: resolve}) - return - } else { - runNext(self, args, resolve) - } - function runNext (self, args, resolve) { - ++ running - resolve( - func.apply(self, args) - .then(finish, function (err) { - finish(err) - throw err - })) - } - - 
function finish () { - -- running - if (queue.length) { - var next = queue.shift() - process.nextTick(runNext, next.self, next.args, next.resolve) - } - } + return function limited () { + const args = Array.prototype.slice.call(arguments) + if (state.running >= maxRunning) { + return new Promise(resolve => { + state.queue.push({resolve, obj: this, args}) + }) + } else { + return callFunc(this, args) + } + } + function callNext () { + if (state.queue.length === 0) return + const next = state.queue.shift() + next.resolve(callFunc(next.obj, next.args)) + } + function callFunc (obj, args) { + return callFinally(() => { + ++state.running + return func.apply(obj, args) + }, () => { + --state.running + process.nextTick(callNext) }) } + function callFinally (action, fin) { + try { + return Promise.resolve(action()).then(value => { + fin() + return value + }, err => { + fin() + return Promise.reject(err) + }) + } catch (err) { + fin() + return Promise.reject(err) + } + } } module.exports.promise.method = function (classOrObj, method, maxRunning) { if (typeof classOrObj === 'function') { - var func = classOrObj.prototype[method] + const func = classOrObj.prototype[method] classOrObj.prototype[method] = limit.promise(func, maxRunning) } else { - var func = classOrObj[method] + const func = classOrObj[method] classOrObj[method] = limit.promise(func, maxRunning) } } diff --git a/node_modules/call-limit/package.json b/node_modules/call-limit/package.json index e074b244eb367..2e3b264de115b 100644 --- a/node_modules/call-limit/package.json +++ b/node_modules/call-limit/package.json @@ -1,32 +1,28 @@ { - "_args": [ - [ - "call-limit@1.1.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "call-limit@1.1.0", - "_id": "call-limit@1.1.0", + "_from": "call-limit@1.1.1", + "_id": "call-limit@1.1.1", "_inBundle": false, - "_integrity": "sha1-b9YbA/PaQqLNDsK2DwK9DnGZH+o=", + "_integrity": "sha512-5twvci5b9eRBw2wCfPtN0GmlR2/gadZqyFpPhOK6CvMFoFgA+USnZ6Jpu1lhG9h85pQ3Ouil3PfXWRD4EUaRiQ==", "_location": "/call-limit", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "call-limit@1.1.0", + "raw": "call-limit@1.1.1", "name": "call-limit", "escapedName": "call-limit", - "rawSpec": "1.1.0", + "rawSpec": "1.1.1", "saveSpec": null, - "fetchSpec": "1.1.0" + "fetchSpec": "1.1.1" }, "_requiredBy": [ + "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.0.tgz", - "_spec": "1.1.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.1.tgz", + "_shasum": "ef15f2670db3f1992557e2d965abc459e6e358d4", + "_spec": "call-limit@1.1.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Rebecca Turner", "email": "me@re-becca.org" @@ -34,10 +30,16 @@ "bugs": { "url": "https://github.com/iarna/call-limit/issues" }, + "bundleDependencies": false, "dependencies": {}, + "deprecated": false, "description": "Limit the number of simultaneous calls to an async function", "devDependencies": { - "tap": "^1.0.0" + "@iarna/standard": "*", + "standard-version": "*", + "tap": "^14.2.0", + "weallbehave": "*", + "weallcontribute": "*" }, "files": [ "call-limit.js" @@ -51,7 +53,13 @@ "url": "git+https://github.com/iarna/call-limit.git" }, "scripts": { - "test": "tap test/*.js" + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "iarna-standard", + "release": "standard-version -s", + "test": "tap test/*.js", + "update-coc": "weallbehave -o . 
&& git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "1.1.0" + "version": "1.1.1" } diff --git a/node_modules/block-stream/LICENSE b/node_modules/cross-spawn/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/block-stream/LICENSE rename to node_modules/cross-spawn/node_modules/lru-cache/LICENSE diff --git a/node_modules/cacache/node_modules/lru-cache/README.md b/node_modules/cross-spawn/node_modules/lru-cache/README.md similarity index 89% rename from node_modules/cacache/node_modules/lru-cache/README.md rename to node_modules/cross-spawn/node_modules/lru-cache/README.md index 435dfebb7e27d..d660dd5747abe 100644 --- a/node_modules/cacache/node_modules/lru-cache/README.md +++ b/node_modules/cross-spawn/node_modules/lru-cache/README.md @@ -18,8 +18,8 @@ var LRU = require("lru-cache") , length: function (n, key) { return n * 2 + key.length } , dispose: function (key, n) { n.close() } , maxAge: 1000 * 60 * 60 } - , cache = new LRU(options) - , otherCache = new LRU(50) // sets just the max size + , cache = LRU(options) + , otherCache = LRU(50) // sets just the max size cache.set("key", "value") cache.get("key") // "value" @@ -49,13 +49,10 @@ away. * `max` The maximum size of the cache, checked by applying the length function to all values in the cache. Not setting this is kind of silly, since that's the whole purpose of this lib, but it defaults - to `Infinity`. Setting it to a non-number or negative number will - throw a `TypeError`. Setting it to 0 makes it be `Infinity`. + to `Infinity`. * `maxAge` Maximum age in ms. Items are not pro-actively pruned out as they age, but if you try to get an item that is too old, it'll drop it and return undefined instead of giving it to you. - Setting this to a negative value will make everything seem old! - Setting it to a non-number will throw a `TypeError`. * `length` Function that is used to calculate the length of stored items. If you're storing strings or buffers, then you probably want to do something like `function(n, key){return n.length}`. The default is @@ -79,11 +76,6 @@ away. it'll be called whenever a `set()` operation overwrites an existing key. If you set this option, `dispose()` will only be called when a key falls out of the cache, not when it is overwritten. -* `updateAgeOnGet` When using time-expiring entries with `maxAge`, - setting this to `true` will make each item's effective time update - to the current time whenever it is retrieved from cache, causing it - to not expire. (It can still fall out of cache based on recency of - use, of course.) ## API diff --git a/node_modules/cross-spawn/node_modules/lru-cache/index.js b/node_modules/cross-spawn/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..bd35b53589381 --- /dev/null +++ b/node_modules/cross-spawn/node_modules/lru-cache/index.js @@ -0,0 +1,468 @@ +'use strict' + +module.exports = LRUCache + +// This will be a proper iterable 'Map' in engines that support it, +// or a fakey-fake PseudoMap in older versions. 
+var Map = require('pseudomap') +var util = require('util') + +// A linked list to keep track of recently-used-ness +var Yallist = require('yallist') + +// use symbols if possible, otherwise just _props +var hasSymbol = typeof Symbol === 'function' && process.env._nodeLRUCacheForceNoSymbol !== '1' +var makeSymbol +if (hasSymbol) { + makeSymbol = function (key) { + return Symbol(key) + } +} else { + makeSymbol = function (key) { + return '_' + key + } +} + +var MAX = makeSymbol('max') +var LENGTH = makeSymbol('length') +var LENGTH_CALCULATOR = makeSymbol('lengthCalculator') +var ALLOW_STALE = makeSymbol('allowStale') +var MAX_AGE = makeSymbol('maxAge') +var DISPOSE = makeSymbol('dispose') +var NO_DISPOSE_ON_SET = makeSymbol('noDisposeOnSet') +var LRU_LIST = makeSymbol('lruList') +var CACHE = makeSymbol('cache') + +function naiveLength () { return 1 } + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +function LRUCache (options) { + if (!(this instanceof LRUCache)) { + return new LRUCache(options) + } + + if (typeof options === 'number') { + options = { max: options } + } + + if (!options) { + options = {} + } + + var max = this[MAX] = options.max + // Kind of weird to have a default max of Infinity, but oh well. + if (!max || + !(typeof max === 'number') || + max <= 0) { + this[MAX] = Infinity + } + + var lc = options.length || naiveLength + if (typeof lc !== 'function') { + lc = naiveLength + } + this[LENGTH_CALCULATOR] = lc + + this[ALLOW_STALE] = options.stale || false + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this.reset() +} + +// resize the cache when the max changes. +Object.defineProperty(LRUCache.prototype, 'max', { + set: function (mL) { + if (!mL || !(typeof mL === 'number') || mL <= 0) { + mL = Infinity + } + this[MAX] = mL + trim(this) + }, + get: function () { + return this[MAX] + }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'allowStale', { + set: function (allowStale) { + this[ALLOW_STALE] = !!allowStale + }, + get: function () { + return this[ALLOW_STALE] + }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'maxAge', { + set: function (mA) { + if (!mA || !(typeof mA === 'number') || mA < 0) { + mA = 0 + } + this[MAX_AGE] = mA + trim(this) + }, + get: function () { + return this[MAX_AGE] + }, + enumerable: true +}) + +// resize the cache when the lengthCalculator changes. 
+Object.defineProperty(LRUCache.prototype, 'lengthCalculator', { + set: function (lC) { + if (typeof lC !== 'function') { + lC = naiveLength + } + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(function (hit) { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }, this) + } + trim(this) + }, + get: function () { return this[LENGTH_CALCULATOR] }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'length', { + get: function () { return this[LENGTH] }, + enumerable: true +}) + +Object.defineProperty(LRUCache.prototype, 'itemCount', { + get: function () { return this[LRU_LIST].length }, + enumerable: true +}) + +LRUCache.prototype.rforEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this[LRU_LIST].tail; walker !== null;) { + var prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } +} + +function forEachStep (self, fn, node, thisp) { + var hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) { + hit = undefined + } + } + if (hit) { + fn.call(thisp, hit.value, hit.key, self) + } +} + +LRUCache.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this[LRU_LIST].head; walker !== null;) { + var next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } +} + +LRUCache.prototype.keys = function () { + return this[LRU_LIST].toArray().map(function (k) { + return k.key + }, this) +} + +LRUCache.prototype.values = function () { + return this[LRU_LIST].toArray().map(function (k) { + return k.value + }, this) +} + +LRUCache.prototype.reset = function () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(function (hit) { + this[DISPOSE](hit.key, hit.value) + }, this) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list +} + +LRUCache.prototype.dump = function () { + return this[LRU_LIST].map(function (hit) { + if (!isStale(this, hit)) { + return { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + } + } + }, this).toArray().filter(function (h) { + return h + }) +} + +LRUCache.prototype.dumpLru = function () { + return this[LRU_LIST] +} + +/* istanbul ignore next */ +LRUCache.prototype.inspect = function (n, opts) { + var str = 'LRUCache {' + var extras = false + + var as = this[ALLOW_STALE] + if (as) { + str += '\n allowStale: true' + extras = true + } + + var max = this[MAX] + if (max && max !== Infinity) { + if (extras) { + str += ',' + } + str += '\n max: ' + util.inspect(max, opts) + extras = true + } + + var maxAge = this[MAX_AGE] + if (maxAge) { + if (extras) { + str += ',' + } + str += '\n maxAge: ' + util.inspect(maxAge, opts) + extras = true + } + + var lc = this[LENGTH_CALCULATOR] + if (lc && lc !== naiveLength) { + if (extras) { + str += ',' + } + str += '\n length: ' + util.inspect(this[LENGTH], opts) + extras = true + } + + var didFirst = false + this[LRU_LIST].forEach(function (item) { + if (didFirst) { + str += ',\n ' + } else { + if (extras) { + str += ',\n' + } + didFirst = true + str += '\n ' + } + var key = util.inspect(item.key).split('\n').join('\n ') + var val = { value: item.value } + if (item.maxAge !== maxAge) { + val.maxAge = item.maxAge + } + if (lc !== naiveLength) { + val.length = item.length + } + if (isStale(this, item)) { + val.stale = 
true + } + + val = util.inspect(val, opts).split('\n').join('\n ') + str += key + ' => ' + val + }) + + if (didFirst || extras) { + str += '\n' + } + str += '}' + + return str +} + +LRUCache.prototype.set = function (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + var now = maxAge ? Date.now() : 0 + var len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + var node = this[CACHE].get(key) + var item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) { + this[DISPOSE](key, item.value) + } + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + var hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) { + this[DISPOSE](key, value) + } + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true +} + +LRUCache.prototype.has = function (key) { + if (!this[CACHE].has(key)) return false + var hit = this[CACHE].get(key).value + if (isStale(this, hit)) { + return false + } + return true +} + +LRUCache.prototype.get = function (key) { + return get(this, key, true) +} + +LRUCache.prototype.peek = function (key) { + return get(this, key, false) +} + +LRUCache.prototype.pop = function () { + var node = this[LRU_LIST].tail + if (!node) return null + del(this, node) + return node.value +} + +LRUCache.prototype.del = function (key) { + del(this, this[CACHE].get(key)) +} + +LRUCache.prototype.load = function (arr) { + // reset the cache + this.reset() + + var now = Date.now() + // A previous serialized cache has the most recent items first + for (var l = arr.length - 1; l >= 0; l--) { + var hit = arr[l] + var expiresAt = hit.e || 0 + if (expiresAt === 0) { + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + } else { + var maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } +} + +LRUCache.prototype.prune = function () { + var self = this + this[CACHE].forEach(function (value, key) { + get(self, key, false) + }) +} + +function get (self, key, doUse) { + var node = self[CACHE].get(key) + if (node) { + var hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) hit = undefined + } else { + if (doUse) { + self[LRU_LIST].unshiftNode(node) + } + } + if (hit) hit = hit.value + } + return hit +} + +function isStale (self, hit) { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) { + return false + } + var stale = false + var diff = Date.now() - hit.now + if (hit.maxAge) { + stale = diff > hit.maxAge + } else { + stale = self[MAX_AGE] && (diff > self[MAX_AGE]) + } + return stale +} + +function trim (self) { + if (self[LENGTH] > self[MAX]) { + for (var walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
+ var prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +function del (self, node) { + if (node) { + var hit = node.value + if (self[DISPOSE]) { + self[DISPOSE](hit.key, hit.value) + } + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +// classy, since V8 prefers predictable objects. +function Entry (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 +} diff --git a/node_modules/pacote/node_modules/lru-cache/package.json b/node_modules/cross-spawn/node_modules/lru-cache/package.json similarity index 63% rename from node_modules/pacote/node_modules/lru-cache/package.json rename to node_modules/cross-spawn/node_modules/lru-cache/package.json index a7fbba4b55ddd..092742d4d6e4d 100644 --- a/node_modules/pacote/node_modules/lru-cache/package.json +++ b/node_modules/cross-spawn/node_modules/lru-cache/package.json @@ -1,27 +1,27 @@ { - "_from": "lru-cache@^5.1.1", - "_id": "lru-cache@5.1.1", + "_from": "lru-cache@^4.0.1", + "_id": "lru-cache@4.1.5", "_inBundle": false, - "_integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "_location": "/pacote/lru-cache", + "_integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "_location": "/cross-spawn/lru-cache", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "lru-cache@^5.1.1", + "raw": "lru-cache@^4.0.1", "name": "lru-cache", "escapedName": "lru-cache", - "rawSpec": "^5.1.1", + "rawSpec": "^4.0.1", "saveSpec": null, - "fetchSpec": "^5.1.1" + "fetchSpec": "^4.0.1" }, "_requiredBy": [ - "/pacote" + "/cross-spawn" ], - "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "_shasum": "1da27e6710271947695daf6848e847f01d84b920", - "_spec": "lru-cache@^5.1.1", - "_where": "/Users/aeschright/code/cli/node_modules/pacote", + "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "_shasum": "8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd", + "_spec": "lru-cache@^4.0.1", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/cross-spawn", "author": { "name": "Isaac Z. 
Schlueter", "email": "i@izs.me" @@ -31,12 +31,14 @@ }, "bundleDependencies": false, "dependencies": { - "yallist": "^3.0.2" + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" }, "deprecated": false, "description": "A cache object that deletes the least-recently-used items.", "devDependencies": { "benchmark": "^2.1.4", + "standard": "^12.0.1", "tap": "^12.1.0" }, "files": [ @@ -57,11 +59,13 @@ }, "scripts": { "coveragerport": "tap --coverage-report=html", + "lintfix": "standard --fix test/*.js index.js", "postpublish": "git push origin --all; git push origin --tags", - "postversion": "npm publish", + "posttest": "standard test/*.js index.js", + "postversion": "npm publish --tag=legacy", "preversion": "npm test", "snap": "TAP_SNAPSHOT=1 tap test/*.js -J", "test": "tap test/*.js --100 -J" }, - "version": "5.1.1" + "version": "4.1.5" } diff --git a/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/cross-spawn/node_modules/yallist/LICENSE similarity index 100% rename from node_modules/cacache/node_modules/lru-cache/LICENSE rename to node_modules/cross-spawn/node_modules/yallist/LICENSE diff --git a/node_modules/cacache/node_modules/yallist/README.md b/node_modules/cross-spawn/node_modules/yallist/README.md similarity index 100% rename from node_modules/cacache/node_modules/yallist/README.md rename to node_modules/cross-spawn/node_modules/yallist/README.md diff --git a/node_modules/cross-spawn/node_modules/yallist/iterator.js b/node_modules/cross-spawn/node_modules/yallist/iterator.js new file mode 100644 index 0000000000000..4a15bf22c4003 --- /dev/null +++ b/node_modules/cross-spawn/node_modules/yallist/iterator.js @@ -0,0 +1,7 @@ +var Yallist = require('./yallist.js') + +Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } +} diff --git a/node_modules/cacache/node_modules/yallist/package.json b/node_modules/cross-spawn/node_modules/yallist/package.json similarity index 63% rename from node_modules/cacache/node_modules/yallist/package.json rename to node_modules/cross-spawn/node_modules/yallist/package.json index 91ea442aa86f1..de575bcbb85a8 100644 --- a/node_modules/cacache/node_modules/yallist/package.json +++ b/node_modules/cross-spawn/node_modules/yallist/package.json @@ -1,27 +1,27 @@ { - "_from": "yallist@^3.0.2", - "_id": "yallist@3.0.3", + "_from": "yallist@^2.1.2", + "_id": "yallist@2.1.2", "_inBundle": false, - "_integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", - "_location": "/cacache/yallist", + "_integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "_location": "/cross-spawn/yallist", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "yallist@^3.0.2", + "raw": "yallist@^2.1.2", "name": "yallist", "escapedName": "yallist", - "rawSpec": "^3.0.2", + "rawSpec": "^2.1.2", "saveSpec": null, - "fetchSpec": "^3.0.2" + "fetchSpec": "^2.1.2" }, "_requiredBy": [ - "/cacache/lru-cache" + "/cross-spawn/lru-cache" ], - "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "_shasum": "b4b049e314be545e3ce802236d6cd22cd91c3de9", - "_spec": "yallist@^3.0.2", - "_where": "/Users/aeschright/code/cli/node_modules/cacache/node_modules/lru-cache", + "_resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "_shasum": "1c11f9218f076089a47dd512f93c6699a6a81d52", + "_spec": "yallist@^2.1.2", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/cross-spawn/node_modules/lru-cache", 
"author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -35,7 +35,7 @@ "deprecated": false, "description": "Yet Another Linked List", "devDependencies": { - "tap": "^12.1.0" + "tap": "^10.3.0" }, "directories": { "test": "test" @@ -58,5 +58,5 @@ "preversion": "npm test", "test": "tap test/*.js --100" }, - "version": "3.0.3" + "version": "2.1.2" } diff --git a/node_modules/pacote/node_modules/yallist/yallist.js b/node_modules/cross-spawn/node_modules/yallist/yallist.js similarity index 98% rename from node_modules/pacote/node_modules/yallist/yallist.js rename to node_modules/cross-spawn/node_modules/yallist/yallist.js index b0ab36cf31b7a..518d23330b936 100644 --- a/node_modules/pacote/node_modules/yallist/yallist.js +++ b/node_modules/cross-spawn/node_modules/yallist/yallist.js @@ -1,4 +1,3 @@ -'use strict' module.exports = Yallist Yallist.Node = Node @@ -369,8 +368,3 @@ function Node (value, prev, next, list) { this.next = null } } - -try { - // add if support for Symbol.iterator is present - require('./iterator.js')(Yallist) -} catch (er) {} diff --git a/node_modules/fstream/.travis.yml b/node_modules/fstream/.travis.yml deleted file mode 100644 index 9f5972ab5aa1c..0000000000000 --- a/node_modules/fstream/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: node_js -node_js: - - "6" - - "4" - - "0.10" - - "0.12" -before_install: - - "npm config set spin false" - - "npm install -g npm/npm" diff --git a/node_modules/fstream/LICENSE b/node_modules/fstream/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/fstream/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fstream/README.md b/node_modules/fstream/README.md deleted file mode 100644 index 9d8cb77e5c30b..0000000000000 --- a/node_modules/fstream/README.md +++ /dev/null @@ -1,76 +0,0 @@ -Like FS streams, but with stat on them, and supporting directories and -symbolic links, as well as normal files. Also, you can use this to set -the stats on a file, even if you don't change its contents, or to create -a symlink, etc. - -So, for example, you can "write" a directory, and it'll call `mkdir`. You -can specify a uid and gid, and it'll call `chown`. You can specify a -`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink -and provide a `linkpath` and it'll call `symlink`. - -Note that it won't automatically resolve symbolic links. So, if you -call `fstream.Reader('/some/symlink')` then you'll get an object -that stats and then ends immediately (since it has no data). To follow -symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow: -true })`. - -There are various checks to make sure that the bytes emitted are the -same as the intended size, if the size is set. 
- -## Examples - -```javascript -fstream - .Writer({ path: "path/to/file" - , mode: 0755 - , size: 6 - }) - .write("hello\n") - .end() -``` - -This will create the directories if they're missing, and then write -`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have -been written when it's done. - -```javascript -fstream - .Writer({ path: "path/to/file" - , mode: 0755 - , size: 6 - , flags: "a" - }) - .write("hello\n") - .end() -``` - -You can pass flags in, if you want to append to a file. - -```javascript -fstream - .Writer({ path: "path/to/symlink" - , linkpath: "./file" - , SymbolicLink: true - , mode: "0755" // octal strings supported - }) - .end() -``` - -If isSymbolicLink is a function, it'll be called, and if it returns -true, then it'll treat it as a symlink. If it's not a function, then -any truish value will make a symlink, or you can set `type: -'SymbolicLink'`, which does the same thing. - -Note that the linkpath is relative to the symbolic link location, not -the parent dir or cwd. - -```javascript -fstream - .Reader("path/to/dir") - .pipe(fstream.Writer("path/to/other/dir")) -``` - -This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other -dir exists and isn't a directory, then it'll emit an error. It'll also -set the uid, gid, mode, etc. to be identical. In this way, it's more -like `rsync -a` than simply a copy. diff --git a/node_modules/fstream/examples/filter-pipe.js b/node_modules/fstream/examples/filter-pipe.js deleted file mode 100644 index 83dadef8a6f39..0000000000000 --- a/node_modules/fstream/examples/filter-pipe.js +++ /dev/null @@ -1,134 +0,0 @@ -var fstream = require('../fstream.js') -var path = require('path') - -var r = fstream.Reader({ - path: path.dirname(__dirname), - filter: function () { - return !this.basename.match(/^\./) && - !this.basename.match(/^node_modules$/) && - !this.basename.match(/^deep-copy$/) && - !this.basename.match(/^filter-copy$/) - } -}) - -// this writer will only write directories -var w = fstream.Writer({ - path: path.resolve(__dirname, 'filter-copy'), - type: 'Directory', - filter: function () { - return this.type === 'Directory' - } -}) - -var indent = '' - -r.on('entry', appears) -r.on('ready', function () { - console.error('ready to begin!', r.path) -}) - -function appears (entry) { - console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename) - if (foggy) { - console.error('FOGGY!') - var p = entry - do { - console.error(p.depth, p.path, p._paused) - p = p.parent - } while (p) - - throw new Error('\u001b[mshould not have entries while foggy') - } - indent += '\t' - entry.on('data', missile(entry)) - entry.on('end', runaway(entry)) - entry.on('entry', appears) -} - -var foggy -function missile (entry) { - function liftFog (who) { - if (!foggy) return - if (who) { - console.error('%s breaks the spell!', who && who.path) - } else { - console.error('the spell expires!') - } - console.error('\u001b[mthe fog lifts!\n') - clearTimeout(foggy) - foggy = null - if (entry._paused) entry.resume() - } - - if (entry.type === 'Directory') { - var ended = false - entry.once('end', function () { ended = true }) - return function (c) { - // throw in some pathological pause()/resume() behavior - // just for extra fun. 
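-      // (pause the entry mid-stream, then let liftFog resume it on a
-      // timer, to exercise the reader's buffering under back-pressure)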
- process.nextTick(function () { - if (!foggy && !ended) { // && Math.random() < 0.3) { - console.error(indent + '%s casts a spell', entry.basename) - console.error('\na slowing fog comes over the battlefield...\n\u001b[32m') - entry.pause() - entry.once('resume', liftFog) - foggy = setTimeout(liftFog, 1000) - } - }) - } - } - - return function (c) { - var e = Math.random() < 0.5 - console.error(indent + '%s %s for %d damage!', - entry.basename, - e ? 'is struck' : 'fires a chunk', - c.length) - } -} - -function runaway (entry) { - return function () { - var e = Math.random() < 0.5 - console.error(indent + '%s %s', - entry.basename, - e ? 'turns to flee' : 'is vanquished!') - indent = indent.slice(0, -1) - } -} - -w.on('entry', attacks) -// w.on('ready', function () { attacks(w) }) -function attacks (entry) { - console.error(indent + '%s %s!', entry.basename, - entry.type === 'Directory' ? 'calls for backup' : 'attacks') - entry.on('entry', attacks) -} - -var ended = false -var i = 1 -r.on('end', function () { - if (foggy) clearTimeout(foggy) - console.error("\u001b[mIT'S OVER!!") - console.error('A WINNAR IS YOU!') - - console.log('ok ' + (i++) + ' A WINNAR IS YOU') - ended = true - // now go through and verify that everything in there is a dir. - var p = path.resolve(__dirname, 'filter-copy') - var checker = fstream.Reader({ path: p }) - checker.checker = true - checker.on('child', function (e) { - var ok = e.type === 'Directory' - console.log((ok ? '' : 'not ') + 'ok ' + (i++) + - ' should be a dir: ' + - e.path.substr(checker.path.length + 1)) - }) -}) - -process.on('exit', function () { - console.log((ended ? '' : 'not ') + 'ok ' + (i) + ' ended') - console.log('1..' + i) -}) - -r.pipe(w) diff --git a/node_modules/fstream/examples/pipe.js b/node_modules/fstream/examples/pipe.js deleted file mode 100644 index 3de42ef32bfd7..0000000000000 --- a/node_modules/fstream/examples/pipe.js +++ /dev/null @@ -1,118 +0,0 @@ -var fstream = require('../fstream.js') -var path = require('path') - -var r = fstream.Reader({ - path: path.dirname(__dirname), - filter: function () { - return !this.basename.match(/^\./) && - !this.basename.match(/^node_modules$/) && - !this.basename.match(/^deep-copy$/) - } -}) - -var w = fstream.Writer({ - path: path.resolve(__dirname, 'deep-copy'), - type: 'Directory' -}) - -var indent = '' - -r.on('entry', appears) -r.on('ready', function () { - console.error('ready to begin!', r.path) -}) - -function appears (entry) { - console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename, entry) - if (foggy) { - console.error('FOGGY!') - var p = entry - do { - console.error(p.depth, p.path, p._paused) - p = p.parent - } while (p) - - throw new Error('\u001b[mshould not have entries while foggy') - } - indent += '\t' - entry.on('data', missile(entry)) - entry.on('end', runaway(entry)) - entry.on('entry', appears) -} - -var foggy -function missile (entry) { - function liftFog (who) { - if (!foggy) return - if (who) { - console.error('%s breaks the spell!', who && who.path) - } else { - console.error('the spell expires!') - } - console.error('\u001b[mthe fog lifts!\n') - clearTimeout(foggy) - foggy = null - if (entry._paused) entry.resume() - } - - if (entry.type === 'Directory') { - var ended = false - entry.once('end', function () { ended = true }) - return function (c) { - // throw in some pathological pause()/resume() behavior - // just for extra fun. 
- process.nextTick(function () { - if (!foggy && !ended) { // && Math.random() < 0.3) { - console.error(indent + '%s casts a spell', entry.basename) - console.error('\na slowing fog comes over the battlefield...\n\u001b[32m') - entry.pause() - entry.once('resume', liftFog) - foggy = setTimeout(liftFog, 10) - } - }) - } - } - - return function (c) { - var e = Math.random() < 0.5 - console.error(indent + '%s %s for %d damage!', - entry.basename, - e ? 'is struck' : 'fires a chunk', - c.length) - } -} - -function runaway (entry) { - return function () { - var e = Math.random() < 0.5 - console.error(indent + '%s %s', - entry.basename, - e ? 'turns to flee' : 'is vanquished!') - indent = indent.slice(0, -1) - } -} - -w.on('entry', attacks) -// w.on('ready', function () { attacks(w) }) -function attacks (entry) { - console.error(indent + '%s %s!', entry.basename, - entry.type === 'Directory' ? 'calls for backup' : 'attacks') - entry.on('entry', attacks) -} - -var ended = false -r.on('end', function () { - if (foggy) clearTimeout(foggy) - console.error("\u001b[mIT'S OVER!!") - console.error('A WINNAR IS YOU!') - - console.log('ok 1 A WINNAR IS YOU') - ended = true -}) - -process.on('exit', function () { - console.log((ended ? '' : 'not ') + 'ok 2 ended') - console.log('1..2') -}) - -r.pipe(w) diff --git a/node_modules/fstream/examples/reader.js b/node_modules/fstream/examples/reader.js deleted file mode 100644 index 19affbe7e6e23..0000000000000 --- a/node_modules/fstream/examples/reader.js +++ /dev/null @@ -1,68 +0,0 @@ -var fstream = require('../fstream.js') -var tap = require('tap') -var fs = require('fs') -var path = require('path') -var dir = path.dirname(__dirname) - -tap.test('reader test', function (t) { - var children = -1 - var gotReady = false - var ended = false - - var r = fstream.Reader({ - path: dir, - filter: function () { - // return this.parent === r - return this.parent === r || this === r - } - }) - - r.on('ready', function () { - gotReady = true - children = fs.readdirSync(dir).length - console.error('Setting expected children to ' + children) - t.equal(r.type, 'Directory', 'should be a directory') - }) - - r.on('entry', function (entry) { - children-- - if (!gotReady) { - t.fail('children before ready!') - } - t.equal(entry.dirname, r.path, 'basename is parent dir') - }) - - r.on('error', function (er) { - t.fail(er) - t.end() - process.exit(1) - }) - - r.on('end', function () { - t.equal(children, 0, 'should have seen all children') - ended = true - }) - - var closed = false - r.on('close', function () { - t.ok(ended, 'saw end before close') - t.notOk(closed, 'close should only happen once') - closed = true - t.end() - }) -}) - -tap.test('reader error test', function (t) { - // assumes non-root on a *nix system - var r = fstream.Reader({ path: '/etc/shadow' }) - - r.once('error', function (er) { - t.ok(true) - t.end() - }) - - r.on('end', function () { - t.fail('reader ended without error') - t.end() - }) -}) diff --git a/node_modules/fstream/examples/symlink-write.js b/node_modules/fstream/examples/symlink-write.js deleted file mode 100644 index 19e81eea9fe80..0000000000000 --- a/node_modules/fstream/examples/symlink-write.js +++ /dev/null @@ -1,27 +0,0 @@ -var fstream = require('../fstream.js') -var notOpen = false -process.chdir(__dirname) - -fstream - .Writer({ - path: 'path/to/symlink', - linkpath: './file', - isSymbolicLink: true, - mode: '0755' // octal strings supported - }) - .on('close', function () { - notOpen = true - var fs = require('fs') - var s = 
fs.lstatSync('path/to/symlink') - var isSym = s.isSymbolicLink() - console.log((isSym ? '' : 'not ') + 'ok 1 should be symlink') - var t = fs.readlinkSync('path/to/symlink') - var isTarget = t === './file' - console.log((isTarget ? '' : 'not ') + 'ok 2 should link to ./file') - }) - .end() - -process.on('exit', function () { - console.log((notOpen ? '' : 'not ') + 'ok 3 should be closed') - console.log('1..3') -}) diff --git a/node_modules/fstream/fstream.js b/node_modules/fstream/fstream.js deleted file mode 100644 index c0eb3bea78803..0000000000000 --- a/node_modules/fstream/fstream.js +++ /dev/null @@ -1,35 +0,0 @@ -exports.Abstract = require('./lib/abstract.js') -exports.Reader = require('./lib/reader.js') -exports.Writer = require('./lib/writer.js') - -exports.File = { - Reader: require('./lib/file-reader.js'), - Writer: require('./lib/file-writer.js') -} - -exports.Dir = { - Reader: require('./lib/dir-reader.js'), - Writer: require('./lib/dir-writer.js') -} - -exports.Link = { - Reader: require('./lib/link-reader.js'), - Writer: require('./lib/link-writer.js') -} - -exports.Proxy = { - Reader: require('./lib/proxy-reader.js'), - Writer: require('./lib/proxy-writer.js') -} - -exports.Reader.Dir = exports.DirReader = exports.Dir.Reader -exports.Reader.File = exports.FileReader = exports.File.Reader -exports.Reader.Link = exports.LinkReader = exports.Link.Reader -exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader - -exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer -exports.Writer.File = exports.FileWriter = exports.File.Writer -exports.Writer.Link = exports.LinkWriter = exports.Link.Writer -exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer - -exports.collect = require('./lib/collect.js') diff --git a/node_modules/fstream/lib/abstract.js b/node_modules/fstream/lib/abstract.js deleted file mode 100644 index 97c120e1d5277..0000000000000 --- a/node_modules/fstream/lib/abstract.js +++ /dev/null @@ -1,85 +0,0 @@ -// the parent class for all fstreams. 
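-// It provides the shared on/warn/info/error plumbing that the Reader
-// and Writer subclasses build on.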
- -module.exports = Abstract - -var Stream = require('stream').Stream -var inherits = require('inherits') - -function Abstract () { - Stream.call(this) -} - -inherits(Abstract, Stream) - -Abstract.prototype.on = function (ev, fn) { - if (ev === 'ready' && this.ready) { - process.nextTick(fn.bind(this)) - } else { - Stream.prototype.on.call(this, ev, fn) - } - return this -} - -Abstract.prototype.abort = function () { - this._aborted = true - this.emit('abort') -} - -Abstract.prototype.destroy = function () {} - -Abstract.prototype.warn = function (msg, code) { - var self = this - var er = decorate(msg, code, self) - if (!self.listeners('warn')) { - console.error('%s %s\n' + - 'path = %s\n' + - 'syscall = %s\n' + - 'fstream_type = %s\n' + - 'fstream_path = %s\n' + - 'fstream_unc_path = %s\n' + - 'fstream_class = %s\n' + - 'fstream_stack =\n%s\n', - code || 'UNKNOWN', - er.stack, - er.path, - er.syscall, - er.fstream_type, - er.fstream_path, - er.fstream_unc_path, - er.fstream_class, - er.fstream_stack.join('\n')) - } else { - self.emit('warn', er) - } -} - -Abstract.prototype.info = function (msg, code) { - this.emit('info', msg, code) -} - -Abstract.prototype.error = function (msg, code, th) { - var er = decorate(msg, code, this) - if (th) throw er - else this.emit('error', er) -} - -function decorate (er, code, self) { - if (!(er instanceof Error)) er = new Error(er) - er.code = er.code || code - er.path = er.path || self.path - er.fstream_type = er.fstream_type || self.type - er.fstream_path = er.fstream_path || self.path - if (self._path !== self.path) { - er.fstream_unc_path = er.fstream_unc_path || self._path - } - if (self.linkpath) { - er.fstream_linkpath = er.fstream_linkpath || self.linkpath - } - er.fstream_class = er.fstream_class || self.constructor.name - er.fstream_stack = er.fstream_stack || - new Error().stack.split(/\n/).slice(3).map(function (s) { - return s.replace(/^ {4}at /, '') - }) - - return er -} diff --git a/node_modules/fstream/lib/collect.js b/node_modules/fstream/lib/collect.js deleted file mode 100644 index e5d4f35833476..0000000000000 --- a/node_modules/fstream/lib/collect.js +++ /dev/null @@ -1,70 +0,0 @@ -module.exports = collect - -function collect (stream) { - if (stream._collected) return - - if (stream._paused) return stream.on('resume', collect.bind(null, stream)) - - stream._collected = true - stream.pause() - - stream.on('data', save) - stream.on('end', save) - var buf = [] - function save (b) { - if (typeof b === 'string') b = new Buffer(b) - if (Buffer.isBuffer(b) && !b.length) return - buf.push(b) - } - - stream.on('entry', saveEntry) - var entryBuffer = [] - function saveEntry (e) { - collect(e) - entryBuffer.push(e) - } - - stream.on('proxy', proxyPause) - function proxyPause (p) { - p.pause() - } - - // replace the pipe method with a new version that will - // unlock the buffered stuff. if you just call .pipe() - // without a destination, then it'll re-play the events. - stream.pipe = (function (orig) { - return function (dest) { - // console.error(' === open the pipes', dest && dest.path) - - // let the entries flow through one at a time. - // Once they're all done, then we can resume completely. 
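-      // (each buffered entry is handed to the destination in order;
-      // resume() then replays the buffered data/end events)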
- var e = 0 - ;(function unblockEntry () { - var entry = entryBuffer[e++] - // console.error(" ==== unblock entry", entry && entry.path) - if (!entry) return resume() - entry.on('end', unblockEntry) - if (dest) dest.add(entry) - else stream.emit('entry', entry) - })() - - function resume () { - stream.removeListener('entry', saveEntry) - stream.removeListener('data', save) - stream.removeListener('end', save) - - stream.pipe = orig - if (dest) stream.pipe(dest) - - buf.forEach(function (b) { - if (b) stream.emit('data', b) - else stream.emit('end') - }) - - stream.resume() - } - - return dest - } - })(stream.pipe) -} diff --git a/node_modules/fstream/lib/dir-reader.js b/node_modules/fstream/lib/dir-reader.js deleted file mode 100644 index 820cdc85a8e9c..0000000000000 --- a/node_modules/fstream/lib/dir-reader.js +++ /dev/null @@ -1,252 +0,0 @@ -// A thing that emits "entry" events with Reader objects -// Pausing it causes it to stop emitting entry events, and also -// pauses the current entry if there is one. - -module.exports = DirReader - -var fs = require('graceful-fs') -var inherits = require('inherits') -var path = require('path') -var Reader = require('./reader.js') -var assert = require('assert').ok - -inherits(DirReader, Reader) - -function DirReader (props) { - var self = this - if (!(self instanceof DirReader)) { - throw new Error('DirReader must be called as constructor.') - } - - // should already be established as a Directory type - if (props.type !== 'Directory' || !props.Directory) { - throw new Error('Non-directory type ' + props.type) - } - - self.entries = null - self._index = -1 - self._paused = false - self._length = -1 - - if (props.sort) { - this.sort = props.sort - } - - Reader.call(this, props) -} - -DirReader.prototype._getEntries = function () { - var self = this - - // race condition. might pause() before calling _getEntries, - // and then resume, and try to get them a second time. - if (self._gotEntries) return - self._gotEntries = true - - fs.readdir(self._path, function (er, entries) { - if (er) return self.error(er) - - self.entries = entries - - self.emit('entries', entries) - if (self._paused) self.once('resume', processEntries) - else processEntries() - - function processEntries () { - self._length = self.entries.length - if (typeof self.sort === 'function') { - self.entries = self.entries.sort(self.sort.bind(self)) - } - self._read() - } - }) -} - -// start walking the dir, and emit an "entry" event for each one. -DirReader.prototype._read = function () { - var self = this - - if (!self.entries) return self._getEntries() - - if (self._paused || self._currentEntry || self._aborted) { - // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted) - return - } - - self._index++ - if (self._index >= self.entries.length) { - if (!self._ended) { - self._ended = true - self.emit('end') - self.emit('close') - } - return - } - - // ok, handle this one, then. - - // save creating a proxy, by stat'ing the thing now. - var p = path.resolve(self._path, self.entries[self._index]) - assert(p !== self._path) - assert(self.entries[self._index]) - - // set this to prevent trying to _read() again in the stat time. - self._currentEntry = p - fs[ self.props.follow ? 
'stat' : 'lstat' ](p, function (er, stat) { - if (er) return self.error(er) - - var who = self._proxy || self - - stat.path = p - stat.basename = path.basename(p) - stat.dirname = path.dirname(p) - var childProps = self.getChildProps.call(who, stat) - childProps.path = p - childProps.basename = path.basename(p) - childProps.dirname = path.dirname(p) - - var entry = Reader(childProps, stat) - - // console.error("DR Entry", p, stat.size) - - self._currentEntry = entry - - // "entry" events are for direct entries in a specific dir. - // "child" events are for any and all children at all levels. - // This nomenclature is not completely final. - - entry.on('pause', function (who) { - if (!self._paused && !entry._disowned) { - self.pause(who) - } - }) - - entry.on('resume', function (who) { - if (self._paused && !entry._disowned) { - self.resume(who) - } - }) - - entry.on('stat', function (props) { - self.emit('_entryStat', entry, props) - if (entry._aborted) return - if (entry._paused) { - entry.once('resume', function () { - self.emit('entryStat', entry, props) - }) - } else self.emit('entryStat', entry, props) - }) - - entry.on('ready', function EMITCHILD () { - // console.error("DR emit child", entry._path) - if (self._paused) { - // console.error(" DR emit child - try again later") - // pause the child, and emit the "entry" event once we drain. - // console.error("DR pausing child entry") - entry.pause(self) - return self.once('resume', EMITCHILD) - } - - // skip over sockets. they can't be piped around properly, - // so there's really no sense even acknowledging them. - // if someone really wants to see them, they can listen to - // the "socket" events. - if (entry.type === 'Socket') { - self.emit('socket', entry) - } else { - self.emitEntry(entry) - } - }) - - var ended = false - entry.on('close', onend) - entry.on('disown', onend) - function onend () { - if (ended) return - ended = true - self.emit('childEnd', entry) - self.emit('entryEnd', entry) - self._currentEntry = null - if (!self._paused) { - self._read() - } - } - - // XXX Remove this. Works in node as of 0.6.2 or so. - // Long filenames should not break stuff. - entry.on('error', function (er) { - if (entry._swallowErrors) { - self.warn(er) - entry.emit('end') - entry.emit('close') - } else { - self.emit('error', er) - } - }) - - // proxy up some events. 
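-    // ("child" and "childEnd" bubble up through every ancestor, so the
-    // root reader sees entries from the entire tree)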
- ;[ - 'child', - 'childEnd', - 'warn' - ].forEach(function (ev) { - entry.on(ev, self.emit.bind(self, ev)) - }) - }) -} - -DirReader.prototype.disown = function (entry) { - entry.emit('beforeDisown') - entry._disowned = true - entry.parent = entry.root = null - if (entry === this._currentEntry) { - this._currentEntry = null - } - entry.emit('disown') -} - -DirReader.prototype.getChildProps = function () { - return { - depth: this.depth + 1, - root: this.root || this, - parent: this, - follow: this.follow, - filter: this.filter, - sort: this.props.sort, - hardlinks: this.props.hardlinks - } -} - -DirReader.prototype.pause = function (who) { - var self = this - if (self._paused) return - who = who || self - self._paused = true - if (self._currentEntry && self._currentEntry.pause) { - self._currentEntry.pause(who) - } - self.emit('pause', who) -} - -DirReader.prototype.resume = function (who) { - var self = this - if (!self._paused) return - who = who || self - - self._paused = false - // console.error('DR Emit Resume', self._path) - self.emit('resume', who) - if (self._paused) { - // console.error('DR Re-paused', self._path) - return - } - - if (self._currentEntry) { - if (self._currentEntry.resume) self._currentEntry.resume(who) - } else self._read() -} - -DirReader.prototype.emitEntry = function (entry) { - this.emit('entry', entry) - this.emit('child', entry) -} diff --git a/node_modules/fstream/lib/dir-writer.js b/node_modules/fstream/lib/dir-writer.js deleted file mode 100644 index ec50dca900dcf..0000000000000 --- a/node_modules/fstream/lib/dir-writer.js +++ /dev/null @@ -1,174 +0,0 @@ -// It is expected that, when .add() returns false, the consumer -// of the DirWriter will pause until a "drain" event occurs. Note -// that this is *almost always going to be the case*, unless the -// thing being written is some sort of unsupported type, and thus -// skipped over. - -module.exports = DirWriter - -var Writer = require('./writer.js') -var inherits = require('inherits') -var mkdir = require('mkdirp') -var path = require('path') -var collect = require('./collect.js') - -inherits(DirWriter, Writer) - -function DirWriter (props) { - var self = this - if (!(self instanceof DirWriter)) { - self.error('DirWriter must be called as constructor.', null, true) - } - - // should already be established as a Directory type - if (props.type !== 'Directory' || !props.Directory) { - self.error('Non-directory type ' + props.type + ' ' + - JSON.stringify(props), null, true) - } - - Writer.call(this, props) -} - -DirWriter.prototype._create = function () { - var self = this - mkdir(self._path, Writer.dirmode, function (er) { - if (er) return self.error(er) - // ready to start getting entries! - self.ready = true - self.emit('ready') - self._process() - }) -} - -// a DirWriter has an add(entry) method, but its .write() doesn't -// do anything. Why a no-op rather than a throw? Because this -// leaves open the door for writing directory metadata for -// gnu/solaris style dumpdirs. -DirWriter.prototype.write = function () { - return true -} - -DirWriter.prototype.end = function () { - this._ended = true - this._process() -} - -DirWriter.prototype.add = function (entry) { - var self = this - - // console.error('\tadd', entry._path, '->', self._path) - collect(entry) - if (!self.ready || self._currentEntry) { - self._buffer.push(entry) - return false - } - - // create a new writer, and pipe the incoming entry into it. 
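-  // (the child Writer is actually constructed later, in _process(),
-  // once this entry reaches the front of the buffer)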
- if (self._ended) { - return self.error('add after end') - } - - self._buffer.push(entry) - self._process() - - return this._buffer.length === 0 -} - -DirWriter.prototype._process = function () { - var self = this - - // console.error('DW Process p=%j', self._processing, self.basename) - - if (self._processing) return - - var entry = self._buffer.shift() - if (!entry) { - // console.error("DW Drain") - self.emit('drain') - if (self._ended) self._finish() - return - } - - self._processing = true - // console.error("DW Entry", entry._path) - - self.emit('entry', entry) - - // ok, add this entry - // - // don't allow recursive copying - var p = entry - var pp - do { - pp = p._path || p.path - if (pp === self.root._path || pp === self._path || - (pp && pp.indexOf(self._path) === 0)) { - // console.error('DW Exit (recursive)', entry.basename, self._path) - self._processing = false - if (entry._collected) entry.pipe() - return self._process() - } - p = p.parent - } while (p) - - // console.error("DW not recursive") - - // chop off the entry's root dir, replace with ours - var props = { - parent: self, - root: self.root || self, - type: entry.type, - depth: self.depth + 1 - } - - pp = entry._path || entry.path || entry.props.path - if (entry.parent) { - pp = pp.substr(entry.parent._path.length + 1) - } - // get rid of any ../../ shenanigans - props.path = path.join(self.path, path.join('/', pp)) - - // if i have a filter, the child should inherit it. - props.filter = self.filter - - // all the rest of the stuff, copy over from the source. - Object.keys(entry.props).forEach(function (k) { - if (!props.hasOwnProperty(k)) { - props[k] = entry.props[k] - } - }) - - // not sure at this point what kind of writer this is. - var child = self._currentChild = new Writer(props) - child.on('ready', function () { - // console.error("DW Child Ready", child.type, child._path) - // console.error(" resuming", entry._path) - entry.pipe(child) - entry.resume() - }) - - // XXX Make this work in node. - // Long filenames should not break stuff. - child.on('error', function (er) { - if (child._swallowErrors) { - self.warn(er) - child.emit('end') - child.emit('close') - } else { - self.emit('error', er) - } - }) - - // we fire _end internally *after* end, so that we don't move on - // until any "end" listeners have had their chance to do stuff. - child.on('close', onend) - var ended = false - function onend () { - if (ended) return - ended = true - // console.error("* DW Child end", child.basename) - self._currentChild = null - self._processing = false - self._process() - } -} diff --git a/node_modules/fstream/lib/file-reader.js b/node_modules/fstream/lib/file-reader.js deleted file mode 100644 index baa01f4b3db7e..0000000000000 --- a/node_modules/fstream/lib/file-reader.js +++ /dev/null @@ -1,150 +0,0 @@ -// Basically just a wrapper around an fs.ReadStream - -module.exports = FileReader - -var fs = require('graceful-fs') -var inherits = require('inherits') -var Reader = require('./reader.js') -var EOF = {EOF: true} -var CLOSE = {CLOSE: true} - -inherits(FileReader, Reader) - -function FileReader (props) { - // console.error(" FR create", props.path, props.size, new Error().stack) - var self = this - if (!(self instanceof FileReader)) { - throw new Error('FileReader must be called as constructor.') - } - - // should already be established as a File type - // XXX Todo: preserve hardlinks by tracking dev+inode+nlink, - // with a HardLinkReader class. 
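-  // (for now, hardlinked "Link" entries are read exactly like files)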
- if (!((props.type === 'Link' && props.Link) || - (props.type === 'File' && props.File))) { - throw new Error('Non-file type ' + props.type) - } - - self._buffer = [] - self._bytesEmitted = 0 - Reader.call(self, props) -} - -FileReader.prototype._getStream = function () { - var self = this - var stream = self._stream = fs.createReadStream(self._path, self.props) - - if (self.props.blksize) { - stream.bufferSize = self.props.blksize - } - - stream.on('open', self.emit.bind(self, 'open')) - - stream.on('data', function (c) { - // console.error('\t\t%d %s', c.length, self.basename) - self._bytesEmitted += c.length - // no point saving empty chunks - if (!c.length) { - return - } else if (self._paused || self._buffer.length) { - self._buffer.push(c) - self._read() - } else self.emit('data', c) - }) - - stream.on('end', function () { - if (self._paused || self._buffer.length) { - // console.error('FR Buffering End', self._path) - self._buffer.push(EOF) - self._read() - } else { - self.emit('end') - } - - if (self._bytesEmitted !== self.props.size) { - self.error("Didn't get expected byte count\n" + - 'expect: ' + self.props.size + '\n' + - 'actual: ' + self._bytesEmitted) - } - }) - - stream.on('close', function () { - if (self._paused || self._buffer.length) { - // console.error('FR Buffering Close', self._path) - self._buffer.push(CLOSE) - self._read() - } else { - // console.error('FR close 1', self._path) - self.emit('close') - } - }) - - stream.on('error', function (e) { - self.emit('error', e) - }) - - self._read() -} - -FileReader.prototype._read = function () { - var self = this - // console.error('FR _read', self._path) - if (self._paused) { - // console.error('FR _read paused', self._path) - return - } - - if (!self._stream) { - // console.error('FR _getStream calling', self._path) - return self._getStream() - } - - // clear out the buffer, if there is one. - if (self._buffer.length) { - // console.error('FR _read has buffer', self._buffer.length, self._path) - var buf = self._buffer - for (var i = 0, l = buf.length; i < l; i++) { - var c = buf[i] - if (c === EOF) { - // console.error('FR Read emitting buffered end', self._path) - self.emit('end') - } else if (c === CLOSE) { - // console.error('FR Read emitting buffered close', self._path) - self.emit('close') - } else { - // console.error('FR Read emitting buffered data', self._path) - self.emit('data', c) - } - - if (self._paused) { - // console.error('FR Read Re-pausing at '+i, self._path) - self._buffer = buf.slice(i) - return - } - } - self._buffer.length = 0 - } -// console.error("FR _read done") -// that's about all there is to it. 
-} - -FileReader.prototype.pause = function (who) { - var self = this - // console.error('FR Pause', self._path) - if (self._paused) return - who = who || self - self._paused = true - if (self._stream) self._stream.pause() - self.emit('pause', who) -} - -FileReader.prototype.resume = function (who) { - var self = this - // console.error('FR Resume', self._path) - if (!self._paused) return - who = who || self - self.emit('resume', who) - self._paused = false - if (self._stream) self._stream.resume() - self._read() -} diff --git a/node_modules/fstream/lib/file-writer.js b/node_modules/fstream/lib/file-writer.js deleted file mode 100644 index 4c803d8d68d2f..0000000000000 --- a/node_modules/fstream/lib/file-writer.js +++ /dev/null @@ -1,107 +0,0 @@ -module.exports = FileWriter - -var fs = require('graceful-fs') -var Writer = require('./writer.js') -var inherits = require('inherits') -var EOF = {} - -inherits(FileWriter, Writer) - -function FileWriter (props) { - var self = this - if (!(self instanceof FileWriter)) { - throw new Error('FileWriter must be called as constructor.') - } - - // should already be established as a File type - if (props.type !== 'File' || !props.File) { - throw new Error('Non-file type ' + props.type) - } - - self._buffer = [] - self._bytesWritten = 0 - - Writer.call(this, props) -} - -FileWriter.prototype._create = function () { - var self = this - if (self._stream) return - - var so = {} - if (self.props.flags) so.flags = self.props.flags - so.mode = Writer.filemode - if (self._old && self._old.blksize) so.bufferSize = self._old.blksize - - self._stream = fs.createWriteStream(self._path, so) - - self._stream.on('open', function () { - // console.error("FW open", self._buffer, self._path) - self.ready = true - self._buffer.forEach(function (c) { - if (c === EOF) self._stream.end() - else self._stream.write(c) - }) - self.emit('ready') - // give this a kick just in case it needs it. - self.emit('drain') - }) - - self._stream.on('error', function (er) { self.emit('error', er) }) - - self._stream.on('drain', function () { self.emit('drain') }) - - self._stream.on('close', function () { - // console.error('\n\nFW Stream Close', self._path, self.size) - self._finish() - }) -} - -FileWriter.prototype.write = function (c) { - var self = this - - self._bytesWritten += c.length - - if (!self.ready) { - if (!Buffer.isBuffer(c) && typeof c !== 'string') { - throw new Error('invalid write data') - } - self._buffer.push(c) - return false - } - - var ret = self._stream.write(c) - // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length) - - // allow 2 buffered writes, because otherwise there's just too - // much stop and go bs. 
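-  // (keep reporting "writable" while the internal queue is short, so
-  // the source isn't paused after every single chunk)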
- if (ret === false && self._stream._queue) { - return self._stream._queue.length <= 2 - } else { - return ret - } -} - -FileWriter.prototype.end = function (c) { - var self = this - - if (c) self.write(c) - - if (!self.ready) { - self._buffer.push(EOF) - return false - } - - return self._stream.end() -} - -FileWriter.prototype._finish = function () { - var self = this - if (typeof self.size === 'number' && self._bytesWritten !== self.size) { - self.error( - 'Did not get expected byte count.\n' + - 'expect: ' + self.size + '\n' + - 'actual: ' + self._bytesWritten) - } - Writer.prototype._finish.call(self) -} diff --git a/node_modules/fstream/lib/get-type.js b/node_modules/fstream/lib/get-type.js deleted file mode 100644 index 19f6a657db847..0000000000000 --- a/node_modules/fstream/lib/get-type.js +++ /dev/null @@ -1,33 +0,0 @@ -module.exports = getType - -function getType (st) { - var types = [ - 'Directory', - 'File', - 'SymbolicLink', - 'Link', // special for hardlinks from tarballs - 'BlockDevice', - 'CharacterDevice', - 'FIFO', - 'Socket' - ] - var type - - if (st.type && types.indexOf(st.type) !== -1) { - st[st.type] = true - return st.type - } - - for (var i = 0, l = types.length; i < l; i++) { - type = types[i] - var is = st[type] || st['is' + type] - if (typeof is === 'function') is = is.call(st) - if (is) { - st[type] = true - st.type = type - return type - } - } - - return null -} diff --git a/node_modules/fstream/lib/link-reader.js b/node_modules/fstream/lib/link-reader.js deleted file mode 100644 index fb4cc67a98dc5..0000000000000 --- a/node_modules/fstream/lib/link-reader.js +++ /dev/null @@ -1,53 +0,0 @@ -// Basically just a wrapper around an fs.readlink -// -// XXX: Enhance this to support the Link type, by keeping -// a lookup table of {:}, so that hardlinks -// can be preserved in tarballs. - -module.exports = LinkReader - -var fs = require('graceful-fs') -var inherits = require('inherits') -var Reader = require('./reader.js') - -inherits(LinkReader, Reader) - -function LinkReader (props) { - var self = this - if (!(self instanceof LinkReader)) { - throw new Error('LinkReader must be called as constructor.') - } - - if (!((props.type === 'Link' && props.Link) || - (props.type === 'SymbolicLink' && props.SymbolicLink))) { - throw new Error('Non-link type ' + props.type) - } - - Reader.call(self, props) -} - -// When piping a LinkReader into a LinkWriter, we have to -// already have the linkpath property set, so that has to -// happen *before* the "ready" event, which means we need to -// override the _stat method. 
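-// (readlink() runs first, then the base Reader._stat takes over)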
-LinkReader.prototype._stat = function (currentStat) { - var self = this - fs.readlink(self._path, function (er, linkpath) { - if (er) return self.error(er) - self.linkpath = self.props.linkpath = linkpath - self.emit('linkpath', linkpath) - Reader.prototype._stat.call(self, currentStat) - }) -} - -LinkReader.prototype._read = function () { - var self = this - if (self._paused) return - // basically just a no-op, since we got all the info we need - // from the _stat method - if (!self._ended) { - self.emit('end') - self.emit('close') - self._ended = true - } -} diff --git a/node_modules/fstream/lib/link-writer.js b/node_modules/fstream/lib/link-writer.js deleted file mode 100644 index af54284008faa..0000000000000 --- a/node_modules/fstream/lib/link-writer.js +++ /dev/null @@ -1,95 +0,0 @@ -module.exports = LinkWriter - -var fs = require('graceful-fs') -var Writer = require('./writer.js') -var inherits = require('inherits') -var path = require('path') -var rimraf = require('rimraf') - -inherits(LinkWriter, Writer) - -function LinkWriter (props) { - var self = this - if (!(self instanceof LinkWriter)) { - throw new Error('LinkWriter must be called as constructor.') - } - - // should already be established as a Link type - if (!((props.type === 'Link' && props.Link) || - (props.type === 'SymbolicLink' && props.SymbolicLink))) { - throw new Error('Non-link type ' + props.type) - } - - if (props.linkpath === '') props.linkpath = '.' - if (!props.linkpath) { - self.error('Need linkpath property to create ' + props.type) - } - - Writer.call(this, props) -} - -LinkWriter.prototype._create = function () { - // console.error(" LW _create") - var self = this - var hard = self.type === 'Link' || process.platform === 'win32' - var link = hard ? 'link' : 'symlink' - var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath - - // can only change the link path by clobbering - // For hard links, let's just assume that's always the case, since - // there's no good way to read them if we don't already know. - if (hard) return clobber(self, lp, link) - - fs.readlink(self._path, function (er, p) { - // only skip creation if it's exactly the same link - if (p && p === lp) return finish(self) - clobber(self, lp, link) - }) -} - -function clobber (self, lp, link) { - rimraf(self._path, function (er) { - if (er) return self.error(er) - create(self, lp, link) - }) -} - -function create (self, lp, link) { - fs[link](lp, self._path, function (er) { - // if this is a hard link, and we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier in. - // Additionally, an EPERM or EACCES can happen on win32 if it's trying - // to make a link to a directory. Again, just skip it. - // A better solution would be to have fs.symlink be supported on - // windows in some nice fashion. 
- if (er) { - if ((er.code === 'ENOENT' || - er.code === 'EACCES' || - er.code === 'EPERM') && process.platform === 'win32') { - self.ready = true - self.emit('ready') - self.emit('end') - self.emit('close') - self.end = self._finish = function () {} - } else return self.error(er) - } - finish(self) - }) -} - -function finish (self) { - self.ready = true - self.emit('ready') - if (self._ended && !self._finished) self._finish() -} - -LinkWriter.prototype.end = function () { - // console.error("LW finish in end") - this._ended = true - if (this.ready) { - this._finished = true - this._finish() - } -} diff --git a/node_modules/fstream/lib/proxy-reader.js b/node_modules/fstream/lib/proxy-reader.js deleted file mode 100644 index 4f431c9d9e27d..0000000000000 --- a/node_modules/fstream/lib/proxy-reader.js +++ /dev/null @@ -1,95 +0,0 @@ -// A reader for when we don't yet know what kind of thing -// the thing is. - -module.exports = ProxyReader - -var Reader = require('./reader.js') -var getType = require('./get-type.js') -var inherits = require('inherits') -var fs = require('graceful-fs') - -inherits(ProxyReader, Reader) - -function ProxyReader (props) { - var self = this - if (!(self instanceof ProxyReader)) { - throw new Error('ProxyReader must be called as constructor.') - } - - self.props = props - self._buffer = [] - self.ready = false - - Reader.call(self, props) -} - -ProxyReader.prototype._stat = function () { - var self = this - var props = self.props - // stat the thing to see what the proxy should be. - var stat = props.follow ? 'stat' : 'lstat' - - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = 'File' - } else { - type = getType(current) - } - - props[type] = true - props.type = self.type = type - - self._old = current - self._addProxy(Reader(props, current)) - }) -} - -ProxyReader.prototype._addProxy = function (proxy) { - var self = this - if (self._proxyTarget) { - return self.error('proxy already set') - } - - self._proxyTarget = proxy - proxy._proxy = self - - ;[ - 'error', - 'data', - 'end', - 'close', - 'linkpath', - 'entry', - 'entryEnd', - 'child', - 'childEnd', - 'warn', - 'stat' - ].forEach(function (ev) { - // console.error('~~ proxy event', ev, self.path) - proxy.on(ev, self.emit.bind(self, ev)) - }) - - self.emit('proxy', proxy) - - proxy.on('ready', function () { - // console.error("~~ proxy is ready!", self.path) - self.ready = true - self.emit('ready') - }) - - var calls = self._buffer - self._buffer.length = 0 - calls.forEach(function (c) { - proxy[c[0]].apply(proxy, c[1]) - }) -} - -ProxyReader.prototype.pause = function () { - return this._proxyTarget ? this._proxyTarget.pause() : false -} - -ProxyReader.prototype.resume = function () { - return this._proxyTarget ? this._proxyTarget.resume() : false -} diff --git a/node_modules/fstream/lib/proxy-writer.js b/node_modules/fstream/lib/proxy-writer.js deleted file mode 100644 index a6544621bfbe7..0000000000000 --- a/node_modules/fstream/lib/proxy-writer.js +++ /dev/null @@ -1,111 +0,0 @@ -// A writer for when we don't know what kind of thing -// the thing is. That is, it's not explicitly set, -// so we're going to make it whatever the thing already -// is, or "File" -// -// Until then, collect all events. 
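-// Buffered add/write/end calls are replayed against the real Writer
-// once _stat() has worked out which kind to construct.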
- -module.exports = ProxyWriter - -var Writer = require('./writer.js') -var getType = require('./get-type.js') -var inherits = require('inherits') -var collect = require('./collect.js') -var fs = require('fs') - -inherits(ProxyWriter, Writer) - -function ProxyWriter (props) { - var self = this - if (!(self instanceof ProxyWriter)) { - throw new Error('ProxyWriter must be called as constructor.') - } - - self.props = props - self._needDrain = false - - Writer.call(self, props) -} - -ProxyWriter.prototype._stat = function () { - var self = this - var props = self.props - // stat the thing to see what the proxy should be. - var stat = props.follow ? 'stat' : 'lstat' - - fs[stat](props.path, function (er, current) { - var type - if (er || !current) { - type = 'File' - } else { - type = getType(current) - } - - props[type] = true - props.type = self.type = type - - self._old = current - self._addProxy(Writer(props, current)) - }) -} - -ProxyWriter.prototype._addProxy = function (proxy) { - // console.error("~~ set proxy", this.path) - var self = this - if (self._proxy) { - return self.error('proxy already set') - } - - self._proxy = proxy - ;[ - 'ready', - 'error', - 'close', - 'pipe', - 'drain', - 'warn' - ].forEach(function (ev) { - proxy.on(ev, self.emit.bind(self, ev)) - }) - - self.emit('proxy', proxy) - - var calls = self._buffer - calls.forEach(function (c) { - // console.error("~~ ~~ proxy buffered call", c[0], c[1]) - proxy[c[0]].apply(proxy, c[1]) - }) - self._buffer.length = 0 - if (self._needsDrain) self.emit('drain') -} - -ProxyWriter.prototype.add = function (entry) { - // console.error("~~ proxy add") - collect(entry) - - if (!this._proxy) { - this._buffer.push(['add', [entry]]) - this._needDrain = true - return false - } - return this._proxy.add(entry) -} - -ProxyWriter.prototype.write = function (c) { - // console.error('~~ proxy write') - if (!this._proxy) { - this._buffer.push(['write', [c]]) - this._needDrain = true - return false - } - return this._proxy.write(c) -} - -ProxyWriter.prototype.end = function (c) { - // console.error('~~ proxy end') - if (!this._proxy) { - this._buffer.push(['end', [c]]) - return false - } - return this._proxy.end(c) -} diff --git a/node_modules/fstream/lib/reader.js b/node_modules/fstream/lib/reader.js deleted file mode 100644 index be4f570eeb281..0000000000000 --- a/node_modules/fstream/lib/reader.js +++ /dev/null @@ -1,255 +0,0 @@ -module.exports = Reader - -var fs = require('graceful-fs') -var Stream = require('stream').Stream -var inherits = require('inherits') -var path = require('path') -var getType = require('./get-type.js') -var hardLinks = Reader.hardLinks = {} -var Abstract = require('./abstract.js') - -// Must do this *before* loading the child classes -inherits(Reader, Abstract) - -var LinkReader = require('./link-reader.js') - -function Reader (props, currentStat) { - var self = this - if (!(self instanceof Reader)) return new Reader(props, currentStat) - - if (typeof props === 'string') { - props = { path: props } - } - - // polymorphism. - // call fstream.Reader(dir) to get a DirReader object, etc. - // Note that, unlike in the Writer case, ProxyReader is going - // to be the *normal* state of affairs, since we rarely know - // the type of a file prior to reading it. 
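-  // (the switch below picks DirReader, FileReader, LinkReader,
-  // SocketReader, or ProxyReader, depending on the detected type)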
- - var type - var ClassType - - if (props.type && typeof props.type === 'function') { - type = props.type - ClassType = type - } else { - type = getType(props) - ClassType = Reader - } - - if (currentStat && !type) { - type = getType(currentStat) - props[type] = true - props.type = type - } - - switch (type) { - case 'Directory': - ClassType = require('./dir-reader.js') - break - - case 'Link': - // XXX hard links are just files. - // However, it would be good to keep track of files' dev+inode - // and nlink values, and create a HardLinkReader that emits - // a linkpath value of the original copy, so that the tar - // writer can preserve them. - // ClassType = HardLinkReader - // break - - case 'File': - ClassType = require('./file-reader.js') - break - - case 'SymbolicLink': - ClassType = LinkReader - break - - case 'Socket': - ClassType = require('./socket-reader.js') - break - - case null: - ClassType = require('./proxy-reader.js') - break - } - - if (!(self instanceof ClassType)) { - return new ClassType(props) - } - - Abstract.call(self) - - if (!props.path) { - self.error('Must provide a path', null, true) - } - - self.readable = true - self.writable = false - - self.type = type - self.props = props - self.depth = props.depth = props.depth || 0 - self.parent = props.parent || null - self.root = props.root || (props.parent && props.parent.root) || self - - self._path = self.path = path.resolve(props.path) - if (process.platform === 'win32') { - self.path = self._path = self.path.replace(/\?/g, '_') - if (self._path.length >= 260) { - // how DOES one create files on the moon? - // if the path has spaces in it, then UNC will fail. - self._swallowErrors = true - // if (self._path.indexOf(" ") === -1) { - self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') - // } - } - } - self.basename = props.basename = path.basename(self.path) - self.dirname = props.dirname = path.dirname(self.path) - - // these have served their purpose, and are now just noisy clutter - props.parent = props.root = null - - // console.error("\n\n\n%s setting size to", props.path, props.size) - self.size = props.size - self.filter = typeof props.filter === 'function' ? props.filter : null - if (props.sort === 'alpha') props.sort = alphasort - - // start the ball rolling. - // this will stat the thing, and then call self._read() - // to start reading whatever it is. - // console.error("calling stat", props.path, currentStat) - self._stat(currentStat) -} - -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} - -Reader.prototype._stat = function (currentStat) { - var self = this - var props = self.props - var stat = props.follow ? 'stat' : 'lstat' - // console.error("Reader._stat", self._path, currentStat) - if (currentStat) process.nextTick(statCb.bind(null, null, currentStat)) - else fs[stat](self._path, statCb) - - function statCb (er, props_) { - // console.error("Reader._stat, statCb", self._path, props_, props_.nlink) - if (er) return self.error(er) - - Object.keys(props_).forEach(function (k) { - props[k] = props_[k] - }) - - // if it's not the expected size, then abort here. - if (undefined !== self.size && props.size !== self.size) { - return self.error('incorrect size') - } - self.size = props.size - - var type = getType(props) - var handleHardlinks = props.hardlinks !== false - - // special little thing for handling hardlinks. 
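-    // (the first path seen for a given dev:ino pair is treated as the
-    // real file; later paths with the same key become "Link" entries
-    // that point back at it)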
- if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) { - var k = props.dev + ':' + props.ino - // console.error("Reader has nlink", self._path, k) - if (hardLinks[k] === self._path || !hardLinks[k]) { - hardLinks[k] = self._path - } else { - // switch into hardlink mode. - type = self.type = self.props.type = 'Link' - self.Link = self.props.Link = true - self.linkpath = self.props.linkpath = hardLinks[k] - // console.error("Hardlink detected, switching mode", self._path, self.linkpath) - // Setting __proto__ would arguably be the "correct" - // approach here, but that just seems too wrong. - self._stat = self._read = LinkReader.prototype._read - } - } - - if (self.type && self.type !== type) { - self.error('Unexpected type: ' + type) - } - - // if the filter doesn't pass, then just skip over this one. - // still have to emit end so that dir-walking can move on. - if (self.filter) { - var who = self._proxy || self - // special handling for ProxyReaders - if (!self.filter.call(who, who, props)) { - if (!self._disowned) { - self.abort() - self.emit('end') - self.emit('close') - } - return - } - } - - // last chance to abort or disown before the flow starts! - var events = ['_stat', 'stat', 'ready'] - var e = 0 - ;(function go () { - if (self._aborted) { - self.emit('end') - self.emit('close') - return - } - - if (self._paused && self.type !== 'Directory') { - self.once('resume', go) - return - } - - var ev = events[e++] - if (!ev) { - return self._read() - } - self.emit(ev, props) - go() - })() - } -} - -Reader.prototype.pipe = function (dest) { - var self = this - if (typeof dest.add === 'function') { - // piping to a multi-compatible, and we've got directory entries. - self.on('entry', function (entry) { - var ret = dest.add(entry) - if (ret === false) { - self.pause() - } - }) - } - - // console.error("R Pipe apply Stream Pipe") - return Stream.prototype.pipe.apply(this, arguments) -} - -Reader.prototype.pause = function (who) { - this._paused = true - who = who || this - this.emit('pause', who) - if (this._stream) this._stream.pause(who) -} - -Reader.prototype.resume = function (who) { - this._paused = false - who = who || this - this.emit('resume', who) - if (this._stream) this._stream.resume(who) - this._read() -} - -Reader.prototype._read = function () { - this.error('Cannot read unknown type: ' + this.type) -} diff --git a/node_modules/fstream/lib/socket-reader.js b/node_modules/fstream/lib/socket-reader.js deleted file mode 100644 index e0456ba890ede..0000000000000 --- a/node_modules/fstream/lib/socket-reader.js +++ /dev/null @@ -1,36 +0,0 @@ -// Just get the stats, and then don't do anything. -// You can't really "read" from a socket. You "connect" to it. -// Mostly, this is here so that reading a dir with a socket in it -// doesn't blow up. 
- -module.exports = SocketReader - -var inherits = require('inherits') -var Reader = require('./reader.js') - -inherits(SocketReader, Reader) - -function SocketReader (props) { - var self = this - if (!(self instanceof SocketReader)) { - throw new Error('SocketReader must be called as constructor.') - } - - if (!(props.type === 'Socket' && props.Socket)) { - throw new Error('Non-socket type ' + props.type) - } - - Reader.call(self, props) -} - -SocketReader.prototype._read = function () { - var self = this - if (self._paused) return - // basically just a no-op, since we got all the info we have - // from the _stat method - if (!self._ended) { - self.emit('end') - self.emit('close') - self._ended = true - } -} diff --git a/node_modules/fstream/lib/writer.js b/node_modules/fstream/lib/writer.js deleted file mode 100644 index 3f10547820fad..0000000000000 --- a/node_modules/fstream/lib/writer.js +++ /dev/null @@ -1,390 +0,0 @@ -module.exports = Writer - -var fs = require('graceful-fs') -var inherits = require('inherits') -var rimraf = require('rimraf') -var mkdir = require('mkdirp') -var path = require('path') -var umask = process.platform === 'win32' ? 0 : process.umask() -var getType = require('./get-type.js') -var Abstract = require('./abstract.js') - -// Must do this *before* loading the child classes -inherits(Writer, Abstract) - -Writer.dirmode = parseInt('0777', 8) & (~umask) -Writer.filemode = parseInt('0666', 8) & (~umask) - -var DirWriter = require('./dir-writer.js') -var LinkWriter = require('./link-writer.js') -var FileWriter = require('./file-writer.js') -var ProxyWriter = require('./proxy-writer.js') - -// props is the desired state. current is optionally the current stat, -// provided here so that subclasses can avoid statting the target -// more than necessary. -function Writer (props, current) { - var self = this - - if (typeof props === 'string') { - props = { path: props } - } - - // polymorphism. - // call fstream.Writer(dir) to get a DirWriter object, etc. - var type = getType(props) - var ClassType = Writer - - switch (type) { - case 'Directory': - ClassType = DirWriter - break - case 'File': - ClassType = FileWriter - break - case 'Link': - case 'SymbolicLink': - ClassType = LinkWriter - break - case null: - default: - // Don't know yet what type to create, so we wrap in a proxy. - ClassType = ProxyWriter - break - } - - if (!(self instanceof ClassType)) return new ClassType(props) - - // now get down to business. - - Abstract.call(self) - - if (!props.path) self.error('Must provide a path', null, true) - - // props is what we want to set. - // set some convenience properties as well. - self.type = props.type - self.props = props - self.depth = props.depth || 0 - self.clobber = props.clobber === false ? 
props.clobber : true - self.parent = props.parent || null - self.root = props.root || (props.parent && props.parent.root) || self - - self._path = self.path = path.resolve(props.path) - if (process.platform === 'win32') { - self.path = self._path = self.path.replace(/\?/g, '_') - if (self._path.length >= 260) { - self._swallowErrors = true - self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') - } - } - self.basename = path.basename(props.path) - self.dirname = path.dirname(props.path) - self.linkpath = props.linkpath || null - - props.parent = props.root = null - - // console.error("\n\n\n%s setting size to", props.path, props.size) - self.size = props.size - - if (typeof props.mode === 'string') { - props.mode = parseInt(props.mode, 8) - } - - self.readable = false - self.writable = true - - // buffer until ready, or while handling another entry - self._buffer = [] - self.ready = false - - self.filter = typeof props.filter === 'function' ? props.filter : null - - // start the ball rolling. - // this checks what's there already, and then calls - // self._create() to call the impl-specific creation stuff. - self._stat(current) -} - -// Calling this means that it's something we can't create. -// Just assert that it's already there, otherwise raise a warning. -Writer.prototype._create = function () { - var self = this - fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) { - if (er) { - return self.warn('Cannot create ' + self._path + '\n' + - 'Unsupported type: ' + self.type, 'ENOTSUP') - } - self._finish() - }) -} - -Writer.prototype._stat = function (current) { - var self = this - var props = self.props - var stat = props.follow ? 'stat' : 'lstat' - var who = self._proxy || self - - if (current) statCb(null, current) - else fs[stat](self._path, statCb) - - function statCb (er, current) { - if (self.filter && !self.filter.call(who, who, current)) { - self._aborted = true - self.emit('end') - self.emit('close') - return - } - - // if it's not there, great. We'll just create it. - // if it is there, then we'll need to change whatever differs - if (er || !current) { - return create(self) - } - - self._old = current - var currentType = getType(current) - - // if it's a type change, then we need to clobber or error. - // if it's not a type change, then let the impl take care of it. - if (currentType !== self.type || self.type === 'File' && current.nlink > 1) { - return rimraf(self._path, function (er) { - if (er) return self.error(er) - self._old = null - create(self) - }) - } - - // otherwise, just handle in the app-specific way - // this creates a fs.WriteStream, or mkdir's, or whatever - create(self) - } -} - -function create (self) { - // console.error("W create", self._path, Writer.dirmode) - - // XXX Need to clobber non-dirs that are in the way, - // unless { clobber: false } in the props. - mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) { - // console.error("W created", path.dirname(self._path), er) - if (er) return self.error(er) - - // later on, we have to set the mode and owner for these - self._madeDir = made - return self._create() - }) -} - -function endChmod (self, want, current, path, cb) { - var wantMode = want.mode - var chmod = want.follow || self.type !== 'SymbolicLink' - ? 
'chmod' : 'lchmod' - - if (!fs[chmod]) return cb() - if (typeof wantMode !== 'number') return cb() - - var curMode = current.mode & parseInt('0777', 8) - wantMode = wantMode & parseInt('0777', 8) - if (wantMode === curMode) return cb() - - fs[chmod](path, wantMode, cb) -} - -function endChown (self, want, current, path, cb) { - // Don't even try it unless root. Too easy to EPERM. - if (process.platform === 'win32') return cb() - if (!process.getuid || process.getuid() !== 0) return cb() - if (typeof want.uid !== 'number' && - typeof want.gid !== 'number') return cb() - - if (current.uid === want.uid && - current.gid === want.gid) return cb() - - var chown = (self.props.follow || self.type !== 'SymbolicLink') - ? 'chown' : 'lchown' - if (!fs[chown]) return cb() - - if (typeof want.uid !== 'number') want.uid = current.uid - if (typeof want.gid !== 'number') want.gid = current.gid - - fs[chown](path, want.uid, want.gid, cb) -} - -function endUtimes (self, want, current, path, cb) { - if (!fs.utimes || process.platform === 'win32') return cb() - - var utimes = (want.follow || self.type !== 'SymbolicLink') - ? 'utimes' : 'lutimes' - - if (utimes === 'lutimes' && !fs[utimes]) { - utimes = 'utimes' - } - - if (!fs[utimes]) return cb() - - var curA = current.atime - var curM = current.mtime - var meA = want.atime - var meM = want.mtime - - if (meA === undefined) meA = curA - if (meM === undefined) meM = curM - - if (!isDate(meA)) meA = new Date(meA) - if (!isDate(meM)) meA = new Date(meM) - - if (meA.getTime() === curA.getTime() && - meM.getTime() === curM.getTime()) return cb() - - fs[utimes](path, meA, meM, cb) -} - -// XXX This function is beastly. Break it up! -Writer.prototype._finish = function () { - var self = this - - if (self._finishing) return - self._finishing = true - - // console.error(" W Finish", self._path, self.size) - - // set up all the things. - // At this point, we're already done writing whatever we've gotta write, - // adding files to the dir, etc. - var todo = 0 - var errState = null - var done = false - - if (self._old) { - // the times will almost *certainly* have changed. - // adds the utimes syscall, but remove another stat. - self._old.atime = new Date(0) - self._old.mtime = new Date(0) - // console.error(" W Finish Stale Stat", self._path, self.size) - setProps(self._old) - } else { - var stat = self.props.follow ? 'stat' : 'lstat' - // console.error(" W Finish Stating", self._path, self.size) - fs[stat](self._path, function (er, current) { - // console.error(" W Finish Stated", self._path, self.size, current) - if (er) { - // if we're in the process of writing out a - // directory, it's very possible that the thing we're linking to - // doesn't exist yet (especially if it was intended as a symlink), - // so swallow ENOENT errors here and just soldier on. 
- if (er.code === 'ENOENT' && - (self.type === 'Link' || self.type === 'SymbolicLink') && - process.platform === 'win32') { - self.ready = true - self.emit('ready') - self.emit('end') - self.emit('close') - self.end = self._finish = function () {} - return - } else return self.error(er) - } - setProps(self._old = current) - }) - } - - return - - function setProps (current) { - todo += 3 - endChmod(self, self.props, current, self._path, next('chmod')) - endChown(self, self.props, current, self._path, next('chown')) - endUtimes(self, self.props, current, self._path, next('utimes')) - } - - function next (what) { - return function (er) { - // console.error(" W Finish", what, todo) - if (errState) return - if (er) { - er.fstream_finish_call = what - return self.error(errState = er) - } - if (--todo > 0) return - if (done) return - done = true - - // we may still need to set the mode/etc. on some parent dirs - // that were created previously. delay end/close until then. - if (!self._madeDir) return end() - else endMadeDir(self, self._path, end) - - function end (er) { - if (er) { - er.fstream_finish_call = 'setupMadeDir' - return self.error(er) - } - // all the props have been set, so we're completely done. - self.emit('end') - self.emit('close') - } - } - } -} - -function endMadeDir (self, p, cb) { - var made = self._madeDir - // everything *between* made and path.dirname(self._path) - // needs to be set up. Note that this may just be one dir. - var d = path.dirname(p) - - endMadeDir_(self, d, function (er) { - if (er) return cb(er) - if (d === made) { - return cb() - } - endMadeDir(self, d, cb) - }) -} - -function endMadeDir_ (self, p, cb) { - var dirProps = {} - Object.keys(self.props).forEach(function (k) { - dirProps[k] = self.props[k] - - // only make non-readable dirs if explicitly requested. 
- if (k === 'mode' && self.type !== 'Directory') { - dirProps[k] = dirProps[k] | parseInt('0111', 8) - } - }) - - var todo = 3 - var errState = null - fs.stat(p, function (er, current) { - if (er) return cb(errState = er) - endChmod(self, dirProps, current, p, next) - endChown(self, dirProps, current, p, next) - endUtimes(self, dirProps, current, p, next) - }) - - function next (er) { - if (errState) return - if (er) return cb(errState = er) - if (--todo === 0) return cb() - } -} - -Writer.prototype.pipe = function () { - this.error("Can't pipe from writable stream") -} - -Writer.prototype.add = function () { - this.error("Can't add to non-Directory type") -} - -Writer.prototype.write = function () { - return true -} - -function objectToString (d) { - return Object.prototype.toString.call(d) -} - -function isDate (d) { - return typeof d === 'object' && objectToString(d) === '[object Date]' -} diff --git a/node_modules/fstream/package.json b/node_modules/fstream/package.json deleted file mode 100644 index f90ce2c450ee8..0000000000000 --- a/node_modules/fstream/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "_from": "fstream@^1.0.0", - "_id": "fstream@1.0.12", - "_inBundle": false, - "_integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", - "_location": "/fstream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "fstream@^1.0.0", - "name": "fstream", - "escapedName": "fstream", - "rawSpec": "^1.0.0", - "saveSpec": null, - "fetchSpec": "^1.0.0" - }, - "_requiredBy": [ - "/node-gyp", - "/node-gyp/tar" - ], - "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", - "_shasum": "4e8ba8ee2d48be4f7d0de505455548eae5932045", - "_spec": "fstream@^1.0.0", - "_where": "/Users/isaacs/dev/npm/cli/node_modules/node-gyp", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/npm/fstream/issues" - }, - "bundleDependencies": false, - "dependencies": { - "graceful-fs": "^4.1.2", - "inherits": "~2.0.0", - "mkdirp": ">=0.5 0", - "rimraf": "2" - }, - "deprecated": false, - "description": "Advanced file system stream things", - "devDependencies": { - "standard": "^4.0.0", - "tap": "^1.2.0" - }, - "engines": { - "node": ">=0.6" - }, - "homepage": "https://github.com/npm/fstream#readme", - "license": "ISC", - "main": "fstream.js", - "name": "fstream", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/fstream.git" - }, - "scripts": { - "test": "standard && tap examples/*.js" - }, - "version": "1.0.12" -} diff --git a/node_modules/glob/LICENSE b/node_modules/glob/LICENSE index 19129e315fe59..42ca266df1d52 100644 --- a/node_modules/glob/LICENSE +++ b/node_modules/glob/LICENSE @@ -13,3 +13,9 @@ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+
+## Glob Logo
+
+Glob's logo created by Tanya Brassie <http://tanyabrassie.com/>, licensed
+under a Creative Commons Attribution-ShareAlike 4.0 International License
+https://creativecommons.org/licenses/by-sa/4.0/
diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md
index baa1d1ba86506..e71b967ea2880 100644
--- a/node_modules/glob/README.md
+++ b/node_modules/glob/README.md
@@ -7,7 +7,7 @@ Match files using the patterns the shell uses, like stars and stuff.
 This is a glob implementation in JavaScript. It uses the `minimatch`
 library to do its matching.
 
-![](oh-my-glob.gif)
+![](logo/glob.png)
 
 ## Usage
 
@@ -347,6 +347,11 @@ Users are thus advised not to use a glob result as a guarantee of
 filesystem state in the face of rapid changes. For the vast majority
 of operations, this is never a problem.
 
+## Glob Logo
+Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo).
+
+The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/).
+
 ## Contributing
 
 Any change to behavior (including bugfixes) must come with a test.
diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json
index 7c64de2751e5f..788801e299e45 100644
--- a/node_modules/glob/package.json
+++ b/node_modules/glob/package.json
@@ -1,40 +1,39 @@
 {
-  "_from": "glob@7.1.3",
-  "_id": "glob@7.1.3",
+  "_from": "glob@7.1.4",
+  "_id": "glob@7.1.4",
   "_inBundle": false,
-  "_integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
+  "_integrity": "sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==",
   "_location": "/glob",
   "_phantomChildren": {},
   "_requested": {
     "type": "version",
     "registry": true,
-    "raw": "glob@7.1.3",
+    "raw": "glob@7.1.4",
     "name": "glob",
     "escapedName": "glob",
-    "rawSpec": "7.1.3",
+    "rawSpec": "7.1.4",
     "saveSpec": null,
-    "fetchSpec": "7.1.3"
+    "fetchSpec": "7.1.4"
   },
   "_requiredBy": [
     "#USER",
     "/",
-    "/cacache",
     "/deglob",
     "/eslint",
-    "/globby",
     "/init-package-json",
     "/node-gyp",
-    "/npm-registry-fetch/cacache",
+    "/nyc",
     "/pacote",
     "/read-package-json",
     "/rimraf",
     "/tap",
-    "/tap-mocha-reporter"
+    "/tap-mocha-reporter",
+    "/test-exclude"
   ],
-  "_resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
-  "_shasum": "3960832d3f1574108342dafd3a67b332c0969df1",
-  "_spec": "glob@7.1.3",
-  "_where": "/Users/rebecca/code/npm",
+  "_resolved": "https://registry.npmjs.org/glob/-/glob-7.1.4.tgz",
+  "_shasum": "aa608a2f6c577ad357e1ae5a5c26d9a8d1969255",
+  "_spec": "glob@7.1.4",
+  "_where": "/Users/isaacs/dev/npm/cli",
   "author": {
     "name": "Isaac Z.
Schlueter", "email": "i@izs.me", @@ -85,5 +84,5 @@ "test": "tap test/*.js --cov", "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js" }, - "version": "7.1.3" + "version": "7.1.4" } diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js index 3b94763a76eef..f71f2d93294a6 100644 --- a/node_modules/inherits/inherits.js +++ b/node_modules/inherits/inherits.js @@ -1,7 +1,9 @@ try { var util = require('util'); + /* istanbul ignore next */ if (typeof util.inherits !== 'function') throw ''; module.exports = util.inherits; } catch (e) { + /* istanbul ignore next */ module.exports = require('./inherits_browser.js'); } diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js index c1e78a75e6b52..86bbb3dc29e48 100644 --- a/node_modules/inherits/inherits_browser.js +++ b/node_modules/inherits/inherits_browser.js @@ -1,23 +1,27 @@ if (typeof Object.create === 'function') { // implementation from standard node.js 'util' module module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } }; } else { // old school shim for old browsers module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } } } diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json index 0ae0875ff4874..ab48b2a4b3dbf 100644 --- a/node_modules/inherits/package.json +++ b/node_modules/inherits/package.json @@ -1,52 +1,62 @@ { - "_args": [ - [ - "inherits@2.0.3", - "/Users/rebecca/code/npm" - ] - ], - "_from": "inherits@2.0.3", - "_id": "inherits@2.0.3", + "_from": "inherits@2.0.4", + "_id": "inherits@2.0.4", "_inBundle": false, - "_integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "_location": "/inherits", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "inherits@2.0.3", + "raw": "inherits@2.0.4", "name": "inherits", "escapedName": "inherits", - "rawSpec": "2.0.3", + "rawSpec": "2.0.4", "saveSpec": null, - "fetchSpec": "2.0.3" + "fetchSpec": "2.0.4" }, "_requiredBy": [ + "#USER", "/", + "/are-we-there-yet/readable-stream", "/block-stream", + "/cacache/glob", "/concat-stream", + "/concat-stream/readable-stream", "/duplexify", + "/duplexify/readable-stream", "/flush-write-stream", + "/flush-write-stream/readable-stream", "/from2", + "/from2/readable-stream", + "/fs-write-stream-atomic/readable-stream", "/fstream", "/glob", "/node-gyp/tar", "/parallel-transform", + "/parallel-transform/readable-stream", "/pumpify", "/readable-stream", "/sorted-union-stream/from2", - "/sorted-union-stream/readable-stream" + "/sorted-union-stream/readable-stream", + "/stream-iterate/readable-stream", + "/tap-mocha-reporter/readable-stream", + "/tar-stream", + 
"/through2/readable-stream" ], - "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "_spec": "2.0.3", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", + "_spec": "inherits@2.0.4", + "_where": "/Users/isaacs/dev/npm/cli", "browser": "./inherits_browser.js", "bugs": { "url": "https://github.com/isaacs/inherits/issues" }, + "bundleDependencies": false, + "deprecated": false, "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", "devDependencies": { - "tap": "^7.1.0" + "tap": "^14.2.4" }, "files": [ "inherits.js", @@ -71,7 +81,7 @@ "url": "git://github.com/isaacs/inherits.git" }, "scripts": { - "test": "node test" + "test": "tap" }, - "version": "2.0.3" + "version": "2.0.4" } diff --git a/node_modules/libnpmsearch/.travis.yml b/node_modules/libnpmsearch/.travis.yml new file mode 100644 index 0000000000000..db5ea8b018640 --- /dev/null +++ b/node_modules/libnpmsearch/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - "10" + - "9" + - "8" + - "6" diff --git a/node_modules/libnpmsearch/CHANGELOG.md b/node_modules/libnpmsearch/CHANGELOG.md index 6ca044d3f48b4..9417b57563a6d 100644 --- a/node_modules/libnpmsearch/CHANGELOG.md +++ b/node_modules/libnpmsearch/CHANGELOG.md @@ -2,6 +2,17 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [2.0.1](https://github.com/npm/libnpmsearch/compare/v2.0.0...v2.0.1) (2019-06-10) + + +### Bug Fixes + +* **opts:** support `opts.from` properly ([#2](https://github.com/npm/libnpmsearch/issues/2)) ([da6636c](https://github.com/npm/libnpmsearch/commit/da6636c)) +* **standard:** standard --fix ([beca19c](https://github.com/npm/libnpmsearch/commit/beca19c)) + + + # [2.0.0](https://github.com/npm/libnpmsearch/compare/v1.0.0...v2.0.0) (2018-08-28) diff --git a/node_modules/libnpmsearch/README.md b/node_modules/libnpmsearch/README.md index 6617ddb89d115..a83988cc2867d 100644 --- a/node_modules/libnpmsearch/README.md +++ b/node_modules/libnpmsearch/README.md @@ -46,7 +46,7 @@ console.log(await search('libnpm')) The following opts are used directly by `libnpmsearch` itself: * `opts.limit` - Number of results to limit the query to. Default: 20 -* `opts.offset` - Offset number for results. Used with `opts.limit` for pagination. Default: 0 +* `opts.from` - Offset number for results. Used with `opts.limit` for pagination. Default: 0 * `opts.detailed` - If true, returns an object with `package`, `score`, and `searchScore` fields, with `package` being what would usually be returned, and the other two containing details about how that package scored. Useful for UIs. Default: false * `opts.sortBy` - Used as a shorthand to set `opts.quality`, `opts.maintenance`, and `opts.popularity` with values that prioritize each one. Should be one of `'optimal'`, `'quality'`, `'maintenance'`, or `'popularity'`. Default: `'optimal'` * `opts.maintenance` - Decimal number between `0` and `1` that defines the weight of `maintenance` metrics when scoring and sorting packages. 
Default: `0.65` (same as `opts.sortBy: 'optimal'`) diff --git a/node_modules/libnpmsearch/appveyor.yml b/node_modules/libnpmsearch/appveyor.yml new file mode 100644 index 0000000000000..9cc64c58e02f9 --- /dev/null +++ b/node_modules/libnpmsearch/appveyor.yml @@ -0,0 +1,22 @@ +environment: + matrix: + - nodejs_version: "10" + - nodejs_version: "9" + - nodejs_version: "8" + - nodejs_version: "6" + +platform: + - x64 + +install: + - ps: Install-Product node $env:nodejs_version $env:platform + - npm config set spin false + - npm install + +test_script: + - npm test + +matrix: + fast_finish: true + +build: off diff --git a/node_modules/libnpmsearch/index.js b/node_modules/libnpmsearch/index.js index b84cab150bea2..995549aeee03d 100644 --- a/node_modules/libnpmsearch/index.js +++ b/node_modules/libnpmsearch/index.js @@ -5,12 +5,12 @@ const getStream = require('get-stream') const npmFetch = require('npm-registry-fetch') const SearchOpts = figgyPudding({ - detailed: {default: false}, - limit: {default: 20}, - from: {default: 0}, - quality: {default: 0.65}, - popularity: {default: 0.98}, - maintenance: {default: 0.5}, + detailed: { default: false }, + limit: { default: 20 }, + from: { default: 0 }, + quality: { default: 0.65 }, + popularity: { default: 0.98 }, + maintenance: { default: 0.5 }, sortBy: {} }) @@ -60,6 +60,7 @@ function searchStream (query, opts) { query: { text: Array.isArray(query) ? query.join(' ') : query, size: opts.limit, + from: opts.from, quality: opts.quality, popularity: opts.popularity, maintenance: opts.maintenance diff --git a/node_modules/libnpmsearch/package.json b/node_modules/libnpmsearch/package.json index 50e74c7adbff3..041dc168ddc22 100644 --- a/node_modules/libnpmsearch/package.json +++ b/node_modules/libnpmsearch/package.json @@ -1,32 +1,29 @@ { - "_args": [ - [ - "libnpmsearch@2.0.0", - "/Users/zkat/Documents/code/work/npm" - ] - ], - "_from": "libnpmsearch@2.0.0", - "_id": "libnpmsearch@2.0.0", + "_from": "libnpmsearch@2.0.1", + "_id": "libnpmsearch@2.0.1", "_inBundle": false, - "_integrity": "sha512-vd+JWbTGzOSfiOc+72MU6y7WqmBXn49egCCrIXp27iE/88bX8EpG64ST1blWQI1bSMUr9l1AKPMVsqa2tS5KWA==", + "_integrity": "sha512-K0yXyut9MHHCAH+DOiglQCpmBKPZXSUu76+BE2maSEfQN15OwNaA/Aiioe9lRFlVFOr7WcuJCY+VSl+gLi9NTA==", "_location": "/libnpmsearch", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "libnpmsearch@2.0.0", + "raw": "libnpmsearch@2.0.1", "name": "libnpmsearch", "escapedName": "libnpmsearch", - "rawSpec": "2.0.0", + "rawSpec": "2.0.1", "saveSpec": null, - "fetchSpec": "2.0.0" + "fetchSpec": "2.0.1" }, "_requiredBy": [ + "#USER", + "/", "/libnpm" ], - "_resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-2.0.0.tgz", - "_spec": "2.0.0", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-2.0.1.tgz", + "_shasum": "eccc73a8fbf267d765d18082b85daa2512501f96", + "_spec": "libnpmsearch@2.0.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Kat Marchán", "email": "kzm@zkat.tech" @@ -34,17 +31,19 @@ "bugs": { "url": "https://github.com/npm/libnpmsearch/issues" }, + "bundleDependencies": false, "dependencies": { "figgy-pudding": "^3.5.1", "get-stream": "^4.0.0", "npm-registry-fetch": "^3.8.0" }, + "deprecated": false, "description": "Programmatic API for searching in npm and compatible registries.", "devDependencies": { "nock": "^9.6.1", - "standard": "*", + "standard": "^12.0.0", "standard-version": "*", - "tap": "*", + "tap": "^12.0.1", 
"weallbehave": "*", "weallcontribute": "*" }, @@ -70,5 +69,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "2.0.0" + "version": "2.0.1" } diff --git a/node_modules/libnpmsearch/test/index.js b/node_modules/libnpmsearch/test/index.js index f926af6da8559..bec5c70e46424 100644 --- a/node_modules/libnpmsearch/test/index.js +++ b/node_modules/libnpmsearch/test/index.js @@ -6,7 +6,7 @@ const qs = require('querystring') const test = require('tap').test const tnock = require('./util/tnock.js') -const OPTS = figgyPudding({registry: {}})({ +const OPTS = figgyPudding({ registry: {} })({ registry: 'https://mock.reg/' }) @@ -17,6 +17,7 @@ test('basic test', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -42,6 +43,7 @@ test('search.stream', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -69,6 +71,7 @@ test('accepts a limit option', t => { const query = qs.stringify({ text: 'oo', size: 3, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -81,7 +84,29 @@ test('accepts a limit option', t => { { package: { name: 'cool', version: '1.0.0' } } ] }) - return search('oo', OPTS.concat({limit: 3})).then(results => { + return search('oo', OPTS.concat({ limit: 3 })).then(results => { + t.equal(results.length, 4, 'returns more results if endpoint does so') + }) +}) + +test('accepts a from option', t => { + const query = qs.stringify({ + text: 'oo', + size: 20, + from: 1, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5 + }) + tnock(t, REG).get(`/-/v1/search?${query}`).once().reply(200, { + objects: [ + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.1.0' } }, + { package: { name: 'cool', version: '1.0.0' } }, + { package: { name: 'cool', version: '1.0.0' } } + ] + }) + return search('oo', OPTS.concat({ from: 1 })).then(results => { t.equal(results.length, 4, 'returns more results if endpoint does so') }) }) @@ -90,6 +115,7 @@ test('accepts quality/mainenance/popularity options', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 1, popularity: 2, maintenance: 3 @@ -115,6 +141,7 @@ test('sortBy: quality', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 1, popularity: 0, maintenance: 0 @@ -138,6 +165,7 @@ test('sortBy: popularity', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0, popularity: 1, maintenance: 0 @@ -161,6 +189,7 @@ test('sortBy: maintenance', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0, popularity: 0, maintenance: 1 @@ -184,6 +213,7 @@ test('sortBy: optimal', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -207,6 +237,7 @@ test('detailed format', t => { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0, popularity: 0, maintenance: 1 @@ -252,6 +283,7 @@ test('space-separates and URI-encodes multiple search params', t => { const query = qs.stringify({ text: 'foo bar:baz quux?=', size: 1, + from: 0, quality: 1, popularity: 2, maintenance: 3 diff --git a/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md index d660dd5747abe..435dfebb7e27d 100644 
--- a/node_modules/lru-cache/README.md +++ b/node_modules/lru-cache/README.md @@ -18,8 +18,8 @@ var LRU = require("lru-cache") , length: function (n, key) { return n * 2 + key.length } , dispose: function (key, n) { n.close() } , maxAge: 1000 * 60 * 60 } - , cache = LRU(options) - , otherCache = LRU(50) // sets just the max size + , cache = new LRU(options) + , otherCache = new LRU(50) // sets just the max size cache.set("key", "value") cache.get("key") // "value" @@ -49,10 +49,13 @@ away. * `max` The maximum size of the cache, checked by applying the length function to all values in the cache. Not setting this is kind of silly, since that's the whole purpose of this lib, but it defaults - to `Infinity`. + to `Infinity`. Setting it to a non-number or negative number will + throw a `TypeError`. Setting it to 0 makes it be `Infinity`. * `maxAge` Maximum age in ms. Items are not pro-actively pruned out as they age, but if you try to get an item that is too old, it'll drop it and return undefined instead of giving it to you. + Setting this to a negative value will make everything seem old! + Setting it to a non-number will throw a `TypeError`. * `length` Function that is used to calculate the length of stored items. If you're storing strings or buffers, then you probably want to do something like `function(n, key){return n.length}`. The default is @@ -76,6 +79,11 @@ away. it'll be called whenever a `set()` operation overwrites an existing key. If you set this option, `dispose()` will only be called when a key falls out of the cache, not when it is overwritten. +* `updateAgeOnGet` When using time-expiring entries with `maxAge`, + setting this to `true` will make each item's effective time update + to the current time whenever it is retrieved from cache, causing it + to not expire. (It can still fall out of cache based on recency of + use, of course.) ## API diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js index bd35b53589381..573b6b85b9779 100644 --- a/node_modules/lru-cache/index.js +++ b/node_modules/lru-cache/index.js @@ -1,39 +1,20 @@ 'use strict' -module.exports = LRUCache - -// This will be a proper iterable 'Map' in engines that support it, -// or a fakey-fake PseudoMap in older versions. 
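To make the v4-to-v5 behavior changes documented in the lru-cache README hunk above concrete, here is a small usage sketch (the key, size, and age values are illustrative, not from this diff):

```js
const LRU = require('lru-cache') // v5 exports a class, so `new` is required

const cache = new LRU({
  max: 100,            // a non-number or negative max now throws a TypeError
  maxAge: 1000 * 60,   // entries older than a minute read as cache misses
  updateAgeOnGet: true // new in v5.1: each get() refreshes the entry's age
})

cache.set('key', 'value')
cache.get('key') // => 'value', and resets the entry's expiration clock
```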
-var Map = require('pseudomap') -var util = require('util') - // A linked list to keep track of recently-used-ness -var Yallist = require('yallist') - -// use symbols if possible, otherwise just _props -var hasSymbol = typeof Symbol === 'function' && process.env._nodeLRUCacheForceNoSymbol !== '1' -var makeSymbol -if (hasSymbol) { - makeSymbol = function (key) { - return Symbol(key) - } -} else { - makeSymbol = function (key) { - return '_' + key - } -} +const Yallist = require('yallist') -var MAX = makeSymbol('max') -var LENGTH = makeSymbol('length') -var LENGTH_CALCULATOR = makeSymbol('lengthCalculator') -var ALLOW_STALE = makeSymbol('allowStale') -var MAX_AGE = makeSymbol('maxAge') -var DISPOSE = makeSymbol('dispose') -var NO_DISPOSE_ON_SET = makeSymbol('noDisposeOnSet') -var LRU_LIST = makeSymbol('lruList') -var CACHE = makeSymbol('cache') +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') -function naiveLength () { return 1 } +const naiveLength = () => 1 // lruList is a yallist where the head is the youngest // item, and the tail is the oldest. the list contains the Hit @@ -43,426 +24,311 @@ function naiveLength () { return 1 } // // cache is a Map (or PseudoMap) that matches the keys to // the Yallist.Node object. -function LRUCache (options) { - if (!(this instanceof LRUCache)) { - return new LRUCache(options) +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() } - if (typeof options === 'number') { - options = { max: options } - } + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') - if (!options) { - options = {} + this[MAX] = mL || Infinity + trim(this) } - - var max = this[MAX] = options.max - // Kind of weird to have a default max of Infinity, but oh well. 
- if (!max || - !(typeof max === 'number') || - max <= 0) { - this[MAX] = Infinity + get max () { + return this[MAX] } - var lc = options.length || naiveLength - if (typeof lc !== 'function') { - lc = naiveLength + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] } - this[LENGTH_CALCULATOR] = lc - - this[ALLOW_STALE] = options.stale || false - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this.reset() -} -// resize the cache when the max changes. -Object.defineProperty(LRUCache.prototype, 'max', { - set: function (mL) { - if (!mL || !(typeof mL === 'number') || mL <= 0) { - mL = Infinity - } - this[MAX] = mL - trim(this) - }, - get: function () { - return this[MAX] - }, - enumerable: true -}) + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') -Object.defineProperty(LRUCache.prototype, 'allowStale', { - set: function (allowStale) { - this[ALLOW_STALE] = !!allowStale - }, - get: function () { - return this[ALLOW_STALE] - }, - enumerable: true -}) - -Object.defineProperty(LRUCache.prototype, 'maxAge', { - set: function (mA) { - if (!mA || !(typeof mA === 'number') || mA < 0) { - mA = 0 - } this[MAX_AGE] = mA trim(this) - }, - get: function () { + } + get maxAge () { return this[MAX_AGE] - }, - enumerable: true -}) - -// resize the cache when the lengthCalculator changes. -Object.defineProperty(LRUCache.prototype, 'lengthCalculator', { - set: function (lC) { - if (typeof lC !== 'function') { + } + + // resize the cache when the lengthCalculator changes. + set lengthCalculator (lC) { + if (typeof lC !== 'function') lC = naiveLength - } + if (lC !== this[LENGTH_CALCULATOR]) { this[LENGTH_CALCULATOR] = lC this[LENGTH] = 0 - this[LRU_LIST].forEach(function (hit) { + this[LRU_LIST].forEach(hit => { hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) this[LENGTH] += hit.length - }, this) + }) } trim(this) - }, - get: function () { return this[LENGTH_CALCULATOR] }, - enumerable: true -}) - -Object.defineProperty(LRUCache.prototype, 'length', { - get: function () { return this[LENGTH] }, - enumerable: true -}) - -Object.defineProperty(LRUCache.prototype, 'itemCount', { - get: function () { return this[LRU_LIST].length }, - enumerable: true -}) - -LRUCache.prototype.rforEach = function (fn, thisp) { - thisp = thisp || this - for (var walker = this[LRU_LIST].tail; walker !== null;) { - var prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev } -} + get lengthCalculator () { return this[LENGTH_CALCULATOR] } -function forEachStep (self, fn, node, thisp) { - var hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) { - hit = undefined + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev } } - if (hit) { - fn.call(thisp, hit.value, hit.key, self) + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } } -} -LRUCache.prototype.forEach = function (fn, thisp) { - thisp = thisp || this - for (var walker = this[LRU_LIST].head; walker !== null;) { - var next = walker.next - 
forEachStep(this, fn, walker, thisp) - walker = next + keys () { + return this[LRU_LIST].toArray().map(k => k.key) } -} -LRUCache.prototype.keys = function () { - return this[LRU_LIST].toArray().map(function (k) { - return k.key - }, this) -} + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } -LRUCache.prototype.values = function () { - return this[LRU_LIST].toArray().map(function (k) { - return k.value - }, this) -} + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } -LRUCache.prototype.reset = function () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(function (hit) { - this[DISPOSE](hit.key, hit.value) - }, this) + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list } - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list -} - -LRUCache.prototype.dump = function () { - return this[LRU_LIST].map(function (hit) { - if (!isStale(this, hit)) { - return { + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { k: hit.key, v: hit.value, e: hit.now + (hit.maxAge || 0) - } - } - }, this).toArray().filter(function (h) { - return h - }) -} - -LRUCache.prototype.dumpLru = function () { - return this[LRU_LIST] -} - -/* istanbul ignore next */ -LRUCache.prototype.inspect = function (n, opts) { - var str = 'LRUCache {' - var extras = false - - var as = this[ALLOW_STALE] - if (as) { - str += '\n allowStale: true' - extras = true + }).toArray().filter(h => h) } - var max = this[MAX] - if (max && max !== Infinity) { - if (extras) { - str += ',' - } - str += '\n max: ' + util.inspect(max, opts) - extras = true + dumpLru () { + return this[LRU_LIST] } - var maxAge = this[MAX_AGE] - if (maxAge) { - if (extras) { - str += ',' - } - str += '\n maxAge: ' + util.inspect(maxAge, opts) - extras = true - } + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] - var lc = this[LENGTH_CALCULATOR] - if (lc && lc !== naiveLength) { - if (extras) { - str += ',' - } - str += '\n length: ' + util.inspect(this[LENGTH], opts) - extras = true - } + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') - var didFirst = false - this[LRU_LIST].forEach(function (item) { - if (didFirst) { - str += ',\n ' - } else { - if (extras) { - str += ',\n' + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false } - didFirst = true - str += '\n ' - } - var key = util.inspect(item.key).split('\n').join('\n ') - var val = { value: item.value } - if (item.maxAge !== maxAge) { - val.maxAge = item.maxAge - } - if (lc !== naiveLength) { - val.length = item.length - } - if (isStale(this, item)) { - val.stale = true - } - val = util.inspect(val, opts).split('\n').join('\n ') - str += key + ' => ' + val - }) + const node = this[CACHE].get(key) + const item = node.value - if (didFirst || extras) { - str += '\n' - } - str += '}' + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } - return str -} + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } -LRUCache.prototype.set = function (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] + const hit = new Entry(key, value, len, now, maxAge) - var now = maxAge ? Date.now() : 0 - var len = this[LENGTH_CALCULATOR](value, key) + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) return false } - var node = this[CACHE].get(key) - var item = node.value - - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) { - this[DISPOSE](key, item.value) - } - } - - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) trim(this) return true } - var hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. 
- if (hit.length > this[MAX]) { - if (this[DISPOSE]) { - this[DISPOSE](key, value) - } - return false + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) } - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true -} - -LRUCache.prototype.has = function (key) { - if (!this[CACHE].has(key)) return false - var hit = this[CACHE].get(key).value - if (isStale(this, hit)) { - return false + get (key) { + return get(this, key, true) } - return true -} -LRUCache.prototype.get = function (key) { - return get(this, key, true) -} + peek (key) { + return get(this, key, false) + } -LRUCache.prototype.peek = function (key) { - return get(this, key, false) -} + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null -LRUCache.prototype.pop = function () { - var node = this[LRU_LIST].tail - if (!node) return null - del(this, node) - return node.value -} + del(this, node) + return node.value + } -LRUCache.prototype.del = function (key) { - del(this, this[CACHE].get(key)) -} + del (key) { + del(this, this[CACHE].get(key)) + } -LRUCache.prototype.load = function (arr) { - // reset the cache - this.reset() - - var now = Date.now() - // A previous serialized cache has the most recent items first - for (var l = arr.length - 1; l >= 0; l--) { - var hit = arr[l] - var expiresAt = hit.e || 0 - if (expiresAt === 0) { - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - } else { - var maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } } } } -} -LRUCache.prototype.prune = function () { - var self = this - this[CACHE].forEach(function (value, key) { - get(self, key, false) - }) + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } } -function get (self, key, doUse) { - var node = self[CACHE].get(key) +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) if (node) { - var hit = node.value + const hit = node.value if (isStale(self, hit)) { del(self, node) - if (!self[ALLOW_STALE]) hit = undefined + if (!self[ALLOW_STALE]) + return undefined } else { if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() self[LRU_LIST].unshiftNode(node) } } - if (hit) hit = hit.value + return hit.value } - return hit } -function isStale (self, hit) { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) { +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) return false - } - var stale = false - var diff = Date.now() - hit.now - if (hit.maxAge) { - stale = diff > hit.maxAge - } else { - stale = self[MAX_AGE] && (diff > self[MAX_AGE]) - } - return stale + + const diff = Date.now() - hit.now + return hit.maxAge ? 
diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) } -function trim (self) { +const trim = self => { if (self[LENGTH] > self[MAX]) { - for (var walker = self[LRU_LIST].tail; + for (let walker = self[LRU_LIST].tail; self[LENGTH] > self[MAX] && walker !== null;) { // We know that we're about to delete this one, and also // what the next least recently used key will be, so just // go ahead and set it now. - var prev = walker.prev + const prev = walker.prev del(self, walker) walker = prev } } } -function del (self, node) { +const del = (self, node) => { if (node) { - var hit = node.value - if (self[DISPOSE]) { + const hit = node.value + if (self[DISPOSE]) self[DISPOSE](hit.key, hit.value) - } + self[LENGTH] -= hit.length self[CACHE].delete(hit.key) self[LRU_LIST].removeNode(node) } } -// classy, since V8 prefers predictable objects. -function Entry (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) } + +module.exports = LRUCache diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json index 14760df46579f..3b5a1f8b3fa2d 100644 --- a/node_modules/lru-cache/package.json +++ b/node_modules/lru-cache/package.json @@ -1,32 +1,28 @@ { - "_from": "lru-cache@4.1.5", - "_id": "lru-cache@4.1.5", + "_from": "lru-cache@5.1.1", + "_id": "lru-cache@5.1.1", "_inBundle": false, - "_integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "_integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "_location": "/lru-cache", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "lru-cache@4.1.5", + "raw": "lru-cache@5.1.1", "name": "lru-cache", "escapedName": "lru-cache", - "rawSpec": "4.1.5", + "rawSpec": "5.1.1", "saveSpec": null, - "fetchSpec": "4.1.5" + "fetchSpec": "5.1.1" }, "_requiredBy": [ "#USER", - "/", - "/cross-spawn", - "/foreground-child/cross-spawn", - "/make-fetch-happen", - "/npm-registry-fetch" + "/" ], - "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "_shasum": "8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd", - "_spec": "lru-cache@4.1.5", - "_where": "/Users/aeschright/code/cli", + "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "_shasum": "1da27e6710271947695daf6848e847f01d84b920", + "_spec": "lru-cache@5.1.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Isaac Z. 
Schlueter", "email": "i@izs.me" @@ -36,14 +32,12 @@ }, "bundleDependencies": false, "dependencies": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "yallist": "^3.0.2" }, "deprecated": false, "description": "A cache object that deletes the least-recently-used items.", "devDependencies": { "benchmark": "^2.1.4", - "standard": "^12.0.1", "tap": "^12.1.0" }, "files": [ @@ -64,13 +58,11 @@ }, "scripts": { "coveragerport": "tap --coverage-report=html", - "lintfix": "standard --fix test/*.js index.js", "postpublish": "git push origin --all; git push origin --tags", - "posttest": "standard test/*.js index.js", - "postversion": "npm publish --tag=legacy", + "postversion": "npm publish", "preversion": "npm test", "snap": "TAP_SNAPSHOT=1 tap test/*.js -J", "test": "tap test/*.js --100 -J" }, - "version": "4.1.5" + "version": "5.1.1" } diff --git a/node_modules/make-fetch-happen/CHANGELOG.md b/node_modules/make-fetch-happen/CHANGELOG.md index b83beda41379d..a446842f55d80 100644 --- a/node_modules/make-fetch-happen/CHANGELOG.md +++ b/node_modules/make-fetch-happen/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [4.0.2](https://github.com/zkat/make-fetch-happen/compare/v4.0.1...v4.0.2) (2019-07-02) + + + ## [4.0.1](https://github.com/zkat/make-fetch-happen/compare/v4.0.0...v4.0.1) (2018-04-12) diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index 9aa7f113c5955..9a2b8d75b7ec5 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,8 +1,8 @@ { "_from": "make-fetch-happen@^4.0.1", - "_id": "make-fetch-happen@4.0.1", + "_id": "make-fetch-happen@4.0.2", "_inBundle": false, - "_integrity": "sha512-7R5ivfy9ilRJ1EMKIOziwrns9fGeAD4bAha8EB7BIiBBLHm2KeTUGCrICFt2rbHfzheTLynv50GnNTK1zDTrcQ==", + "_integrity": "sha512-YMJrAjHSb/BordlsDEcVcPyTbiJKkzqMf48N8dAJZT9Zjctrkb6Yg4TY9Sq2AwSIQJFn5qBBKVTYt3vP5FMIHA==", "_location": "/make-fetch-happen", "_phantomChildren": {}, "_requested": { @@ -16,13 +16,12 @@ "fetchSpec": "^4.0.1" }, "_requiredBy": [ - "/libnpmhook/npm-registry-fetch", "/pacote" ], - "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.1.tgz", - "_shasum": "141497cb878f243ba93136c83d8aba12c216c083", + "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.2.tgz", + "_shasum": "2d156b11696fb32bffbafe1ac1bc085dd6c78a79", "_spec": "make-fetch-happen@^4.0.1", - "_where": "/Users/rebecca/code/npm/node_modules/pacote", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/pacote", "author": { "name": "Kat Marchán", "email": "kzm@zkat.tech" @@ -33,11 +32,11 @@ "bundleDependencies": false, "dependencies": { "agentkeepalive": "^3.4.1", - "cacache": "^11.0.1", + "cacache": "^11.3.3", "http-cache-semantics": "^3.8.1", "http-proxy-agent": "^2.1.0", "https-proxy-agent": "^2.2.1", - "lru-cache": "^4.1.2", + "lru-cache": "^5.1.1", "mississippi": "^3.0.0", "node-fetch-npm": "^2.0.2", "promise-retry": "^1.1.1", @@ -57,7 +56,7 @@ "standard": "^11.0.1", "standard-version": "^4.3.0", "tacks": "^1.2.6", - "tap": "^11.1.3", + "tap": "^12.7.0", "weallbehave": "^1.0.0", "weallcontribute": "^1.0.7" }, @@ -91,5 +90,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . 
&& git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "4.0.1" + "version": "4.0.2" } diff --git a/node_modules/node-gyp/CHANGELOG.md b/node_modules/node-gyp/CHANGELOG.md index 33bbfad5dec9a..05167c92dc640 100644 --- a/node_modules/node-gyp/CHANGELOG.md +++ b/node_modules/node-gyp/CHANGELOG.md @@ -1,3 +1,11 @@ +v4.0.0 2019-04-24 +================= + +* [[`ceed5cbe10`](https://github.com/nodejs/node-gyp/commit/ceed5cbe10)] - **deps**: updated tar package version to 4.4.8 (Pobegaylo Maksim) [#1713](https://github.com/nodejs/node-gyp/pull/1713) +* [[`374519e066`](https://github.com/nodejs/node-gyp/commit/374519e066)] - **(SEMVER-MAJOR)** Upgrade to tar v3 (isaacs) [#1212](https://github.com/nodejs/node-gyp/pull/1212) +* [[`e6699d13cd`](https://github.com/nodejs/node-gyp/commit/e6699d13cd)] - **test**: fix addon test for Node.js 12 and V8 7.4 (Richard Lau) [#1705](https://github.com/nodejs/node-gyp/pull/1705) +* [[`0c6bf530a0`](https://github.com/nodejs/node-gyp/commit/0c6bf530a0)] - **lib**: use print() for python version detection (GreenAddress) [#1534](https://github.com/nodejs/node-gyp/pull/1534) + v3.8.0 2018-08-09 ================= diff --git a/node_modules/node-gyp/lib/configure.js b/node_modules/node-gyp/lib/configure.js index 35e6787d1f3e3..a42abe59724c7 100644 --- a/node_modules/node-gyp/lib/configure.js +++ b/node_modules/node-gyp/lib/configure.js @@ -448,7 +448,7 @@ PythonFinder.prototype = { }, checkPythonVersion: function checkPythonVersion () { - var args = ['-c', 'import sys; print "%s.%s.%s" % sys.version_info[:3];'] + var args = ['-c', 'import sys; print("%s.%s.%s" % sys.version_info[:3]);'] var env = extend({}, this.env) env.TERM = 'dumb' diff --git a/node_modules/node-gyp/lib/install.js b/node_modules/node-gyp/lib/install.js index cb84972e18325..8a615dfb38880 100644 --- a/node_modules/node-gyp/lib/install.js +++ b/node_modules/node-gyp/lib/install.js @@ -20,10 +20,8 @@ var fs = require('graceful-fs') , rm = require('rimraf') , path = require('path') , crypto = require('crypto') - , zlib = require('zlib') , log = require('npmlog') , semver = require('semver') - , fstream = require('fstream') , request = require('request') , mkdir = require('mkdirp') , processRelease = require('./process-release') @@ -148,41 +146,33 @@ function install (fs, gyp, argv, callback) { var tarPath = gyp.opts.tarball var badDownload = false , extractCount = 0 - , gunzip = zlib.createGunzip() - , extracter = tar.Extract({ path: devDir, strip: 1, filter: isValid }) var contentShasums = {} var expectShasums = {} // checks if a file to be extracted from the tarball is valid. // only .h header files and the gyp files get extracted - function isValid () { - var name = this.path.substring(devDir.length + 1) - var isValid = valid(name) - if (name === '' && this.type === 'Directory') { - // the first directory entry is ok - return true - } + function isValid (path, entry) { + var isValid = valid(path) if (isValid) { - log.verbose('extracted file from tarball', name) + log.verbose('extracted file from tarball', path) extractCount++ } else { // invalid - log.silly('ignoring from tarball', name) + log.silly('ignoring from tarball', path) } return isValid } - gunzip.on('error', cb) - extracter.on('error', cb) - extracter.on('end', afterTarball) - - // download the tarball, gunzip and extract! + // download the tarball and extract! 
if (tarPath) { - var input = fs.createReadStream(tarPath) - input.pipe(gunzip).pipe(extracter) - return + return tar.extract({ + file: tarPath, + strip: 1, + filter: isValid, + cwd: devDir + }).then(afterTarball, cb) } try { @@ -222,7 +212,11 @@ function install (fs, gyp, argv, callback) { }) // start unzipping and untaring - req.pipe(gunzip).pipe(extracter) + res.pipe(tar.extract({ + strip: 1, + cwd: devDir, + filter: isValid + }).on('close', afterTarball).on('error', cb)) }) // invoked after the tarball has finished being extracted diff --git a/node_modules/node-gyp/node_modules/tar/.travis.yml b/node_modules/node-gyp/node_modules/tar/.travis.yml deleted file mode 100644 index fca8ef019405d..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - 0.10 - - 0.11 diff --git a/node_modules/node-gyp/node_modules/tar/README.md b/node_modules/node-gyp/node_modules/tar/README.md deleted file mode 100644 index cfda2ac180611..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# node-tar - -Tar for Node.js. - -[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/) - -## API - -See `examples/` for usage examples. - -### var tar = require('tar') - -Returns an object with `.Pack`, `.Extract` and `.Parse` methods. - -### tar.Pack([properties]) - -Returns a through stream. Use -[fstream](https://npmjs.org/package/fstream) to write files into the -pack stream and you will receive tar archive data from the pack -stream. - -This only works with directories, it does not work with individual files. - -The optional `properties` object are used to set properties in the tar -'Global Extended Header'. If the `fromBase` property is set to true, -the tar will contain files relative to the path passed, and not with -the path included. - -### tar.Extract([options]) - -Returns a through stream. Write tar data to the stream and the files -in the tarball will be extracted onto the filesystem. - -`options` can be: - -```js -{ - path: '/path/to/extract/tar/into', - strip: 0, // how many path segments to strip from the root when extracting -} -``` - -`options` also get passed to the `fstream.Writer` instance that `tar` -uses internally. - -### tar.Parse() - -Returns a writable stream. Write tar data to it and it will emit -`entry` events for each entry parsed from the tarball. This is used by -`tar.Extract`. 
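The README removed above documents tar v2's through-stream API, which is exactly what the `install.js` hunk earlier in this patch migrates away from: instead of piping a download through `zlib.createGunzip()` into `tar.Extract()`, node-gyp now calls tar v4's `tar.extract()`, which gunzips internally, consults a `filter(path, entry)` predicate per entry, and returns a Promise when given a `file` option. Here is a minimal sketch of the new call shape for reference; the tarball name, destination directory, and filter pattern are hypothetical stand-ins for the hunk's `tarPath`, `devDir`, and `isValid`:

```js
const tar = require('tar') // tar@4, as vendored by node-gyp after this patch

// Hypothetical inputs mirroring the install.js hunk.
const file = 'node-headers.tar.gz'
const cwd = '/tmp/devdir' // must already exist; tar does not create it

tar.extract({
  file,      // read (and auto-gunzip) this tarball directly
  cwd,       // extract into this directory
  strip: 1,  // drop the leading path segment, as the hunk does
  // Return true to keep an entry, false to skip it -- the same shape
  // as the hunk's isValid(path, entry) headers-and-gyp-files check.
  filter: (path, entry) => /\.h$/.test(path) || /\.gyp/.test(path)
}).then(
  () => console.log('extracted'),
  (er) => console.error('extract failed', er)
)
```

Because extraction is promise-based rather than event-based, the hunk can wire `afterTarball` and `cb` through a single `.then()` instead of juggling separate `error`/`end` listeners on two piped streams.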
diff --git a/node_modules/node-gyp/node_modules/tar/examples/extracter.js b/node_modules/node-gyp/node_modules/tar/examples/extracter.js deleted file mode 100644 index f6253a72c5cd3..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/examples/extracter.js +++ /dev/null @@ -1,19 +0,0 @@ -var tar = require("../tar.js") - , fs = require("fs") - - -function onError(err) { - console.error('An error occurred:', err) -} - -function onEnd() { - console.log('Extracted!') -} - -var extractor = tar.Extract({path: __dirname + "/extract"}) - .on('error', onError) - .on('end', onEnd); - -fs.createReadStream(__dirname + "/../test/fixtures/c.tar") - .on('error', onError) - .pipe(extractor); diff --git a/node_modules/node-gyp/node_modules/tar/examples/packer.js b/node_modules/node-gyp/node_modules/tar/examples/packer.js deleted file mode 100644 index 039969ce300d1..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/examples/packer.js +++ /dev/null @@ -1,24 +0,0 @@ -var tar = require("../tar.js") - , fstream = require("fstream") - , fs = require("fs") - -var dirDest = fs.createWriteStream('dir.tar') - - -function onError(err) { - console.error('An error occurred:', err) -} - -function onEnd() { - console.log('Packed!') -} - -var packer = tar.Pack({ noProprietary: true }) - .on('error', onError) - .on('end', onEnd); - -// This must be a "directory" -fstream.Reader({ path: __dirname, type: "Directory" }) - .on('error', onError) - .pipe(packer) - .pipe(dirDest) diff --git a/node_modules/node-gyp/node_modules/tar/examples/reader.js b/node_modules/node-gyp/node_modules/tar/examples/reader.js deleted file mode 100644 index 39f3f0888a2cf..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/examples/reader.js +++ /dev/null @@ -1,36 +0,0 @@ -var tar = require("../tar.js") - , fs = require("fs") - -fs.createReadStream(__dirname + "/../test/fixtures/c.tar") - .pipe(tar.Parse()) - .on("extendedHeader", function (e) { - console.error("extended pax header", e.props) - e.on("end", function () { - console.error("extended pax fields:", e.fields) - }) - }) - .on("ignoredEntry", function (e) { - console.error("ignoredEntry?!?", e.props) - }) - .on("longLinkpath", function (e) { - console.error("longLinkpath entry", e.props) - e.on("end", function () { - console.error("value=%j", e.body.toString()) - }) - }) - .on("longPath", function (e) { - console.error("longPath entry", e.props) - e.on("end", function () { - console.error("value=%j", e.body.toString()) - }) - }) - .on("entry", function (e) { - console.error("entry", e.props) - e.on("data", function (c) { - console.error(" >>>" + c.toString().replace(/\n/g, "\\n")) - }) - e.on("end", function () { - console.error(" << 0 - return !this._needDrain -} - -EntryWriter.prototype.end = function (c) { - // console.error(".. ew end") - if (c) this._buffer.push(c) - this._buffer.push(EOF) - this._ended = true - this._process() - this._needDrain = this._buffer.length > 0 -} - -EntryWriter.prototype.pause = function () { - // console.error(".. ew pause") - this._paused = true - this.emit("pause") -} - -EntryWriter.prototype.resume = function () { - // console.error(".. ew resume") - this._paused = false - this.emit("resume") - this._process() -} - -EntryWriter.prototype.add = function (entry) { - // console.error(".. ew add") - if (!this.parent) return this.emit("error", new Error("no parent")) - - // make sure that the _header and such is emitted, and clear out - // the _currentEntry link on the parent. 
- if (!this._ended) this.end() - - return this.parent.add(entry) -} - -EntryWriter.prototype._header = function () { - // console.error(".. ew header") - if (this._didHeader) return - this._didHeader = true - - var headerBlock = TarHeader.encode(this.props) - - if (this.props.needExtended && !this._meta) { - var me = this - - ExtendedHeaderWriter = ExtendedHeaderWriter || - require("./extended-header-writer.js") - - ExtendedHeaderWriter(this.props) - .on("data", function (c) { - me.emit("data", c) - }) - .on("error", function (er) { - me.emit("error", er) - }) - .end() - } - - // console.error(".. .. ew headerBlock emitting") - this.emit("data", headerBlock) - this.emit("header") -} - -EntryWriter.prototype._process = function () { - // console.error(".. .. ew process") - if (!this._didHeader && !this._meta) { - this._header() - } - - if (this._paused || this._processing) { - // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing) - return - } - - this._processing = true - - var buf = this._buffer - for (var i = 0; i < buf.length; i ++) { - // console.error(".. .. .. i=%d", i) - - var c = buf[i] - - if (c === EOF) this._stream.end() - else this._stream.write(c) - - if (this._paused) { - // console.error(".. .. .. paused mid-emission") - this._processing = false - if (i < buf.length) { - this._needDrain = true - this._buffer = buf.slice(i + 1) - } - return - } - } - - // console.error(".. .. .. emitted") - this._buffer.length = 0 - this._processing = false - - // console.error(".. .. .. emitting drain") - this.emit("drain") -} - -EntryWriter.prototype.destroy = function () {} diff --git a/node_modules/node-gyp/node_modules/tar/lib/entry.js b/node_modules/node-gyp/node_modules/tar/lib/entry.js deleted file mode 100644 index 591202bd3b9af..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/entry.js +++ /dev/null @@ -1,220 +0,0 @@ -// A passthrough read/write stream that sets its properties -// based on a header, extendedHeader, and globalHeader -// -// Can be either a file system object of some sort, or -// a pax/ustar metadata entry. - -module.exports = Entry - -var TarHeader = require("./header.js") - , tar = require("../tar") - , assert = require("assert").ok - , Stream = require("stream").Stream - , inherits = require("inherits") - , fstream = require("fstream").Abstract - -function Entry (header, extended, global) { - Stream.call(this) - this.readable = true - this.writable = true - - this._needDrain = false - this._paused = false - this._reading = false - this._ending = false - this._ended = false - this._remaining = 0 - this._abort = false - this._queue = [] - this._index = 0 - this._queueLen = 0 - - this._read = this._read.bind(this) - - this.props = {} - this._header = header - this._extended = extended || {} - - // globals can change throughout the course of - // a file parse operation. Freeze it at its current state. - this._global = {} - var me = this - Object.keys(global || {}).forEach(function (g) { - me._global[g] = global[g] - }) - - this._setProps() -} - -inherits(Entry, Stream) - -Entry.prototype.write = function (c) { - if (this._ending) this.error("write() after end()", null, true) - if (this._remaining === 0) { - this.error("invalid bytes past eof") - } - - // often we'll get a bunch of \0 at the end of the last write, - // since chunks will always be 512 bytes when reading a tarball. - if (c.length > this._remaining) { - c = c.slice(0, this._remaining) - } - this._remaining -= c.length - - // put it on the stack. 
- var ql = this._queueLen - this._queue.push(c) - this._queueLen ++ - - this._read() - - // either paused, or buffered - if (this._paused || ql > 0) { - this._needDrain = true - return false - } - - return true -} - -Entry.prototype.end = function (c) { - if (c) this.write(c) - this._ending = true - this._read() -} - -Entry.prototype.pause = function () { - this._paused = true - this.emit("pause") -} - -Entry.prototype.resume = function () { - // console.error(" Tar Entry resume", this.path) - this.emit("resume") - this._paused = false - this._read() - return this._queueLen - this._index > 1 -} - - // This is bound to the instance -Entry.prototype._read = function () { - // console.error(" Tar Entry _read", this.path) - - if (this._paused || this._reading || this._ended) return - - // set this flag so that event handlers don't inadvertently - // get multiple _read() calls running. - this._reading = true - - // have any data to emit? - while (this._index < this._queueLen && !this._paused) { - var chunk = this._queue[this._index ++] - this.emit("data", chunk) - } - - // check if we're drained - if (this._index >= this._queueLen) { - this._queue.length = this._queueLen = this._index = 0 - if (this._needDrain) { - this._needDrain = false - this.emit("drain") - } - if (this._ending) { - this._ended = true - this.emit("end") - } - } - - // if the queue gets too big, then pluck off whatever we can. - // this should be fairly rare. - var mql = this._maxQueueLen - if (this._queueLen > mql && this._index > 0) { - mql = Math.min(this._index, mql) - this._index -= mql - this._queueLen -= mql - this._queue = this._queue.slice(mql) - } - - this._reading = false -} - -Entry.prototype._setProps = function () { - // props = extended->global->header->{} - var header = this._header - , extended = this._extended - , global = this._global - , props = this.props - - // first get the values from the normal header. - var fields = tar.fields - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , val = header[field] - if (typeof val !== "undefined") props[field] = val - } - - // next, the global header for this file. - // numeric values, etc, will have already been parsed. - ;[global, extended].forEach(function (p) { - Object.keys(p).forEach(function (f) { - if (typeof p[f] !== "undefined") props[f] = p[f] - }) - }) - - // no nulls allowed in path or linkpath - ;["path", "linkpath"].forEach(function (p) { - if (props.hasOwnProperty(p)) { - props[p] = props[p].split("\0")[0] - } - }) - - - // set date fields to be a proper date - ;["mtime", "ctime", "atime"].forEach(function (p) { - if (props.hasOwnProperty(p)) { - props[p] = new Date(props[p] * 1000) - } - }) - - // set the type so that we know what kind of file to create - var type - switch (tar.types[props.type]) { - case "OldFile": - case "ContiguousFile": - type = "File" - break - - case "GNUDumpDir": - type = "Directory" - break - - case undefined: - type = "Unknown" - break - - case "Link": - case "SymbolicLink": - case "CharacterDevice": - case "BlockDevice": - case "Directory": - case "FIFO": - default: - type = tar.types[props.type] - } - - this.type = type - this.path = props.path - this.size = props.size - - // size is special, since it signals when the file needs to end. - this._remaining = props.size -} - -// the parser may not call write if _abort is true. -// useful for skipping data from some files quickly. 
-Entry.prototype.abort = function(){ - this._abort = true -} - -Entry.prototype.warn = fstream.warn -Entry.prototype.error = fstream.error diff --git a/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js b/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js deleted file mode 100644 index 1728c4583ae06..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js +++ /dev/null @@ -1,191 +0,0 @@ - -module.exports = ExtendedHeaderWriter - -var inherits = require("inherits") - , EntryWriter = require("./entry-writer.js") - -inherits(ExtendedHeaderWriter, EntryWriter) - -var tar = require("../tar.js") - , path = require("path") - , TarHeader = require("./header.js") - -// props is the props of the thing we need to write an -// extended header for. -// Don't be shy with it. Just encode everything. -function ExtendedHeaderWriter (props) { - // console.error(">> ehw ctor") - var me = this - - if (!(me instanceof ExtendedHeaderWriter)) { - return new ExtendedHeaderWriter(props) - } - - me.fields = props - - var p = - { path : ("PaxHeader" + path.join("/", props.path || "")) - .replace(/\\/g, "/").substr(0, 100) - , mode : props.mode || 0666 - , uid : props.uid || 0 - , gid : props.gid || 0 - , size : 0 // will be set later - , mtime : props.mtime || Date.now() / 1000 - , type : "x" - , linkpath : "" - , ustar : "ustar\0" - , ustarver : "00" - , uname : props.uname || "" - , gname : props.gname || "" - , devmaj : props.devmaj || 0 - , devmin : props.devmin || 0 - } - - - EntryWriter.call(me, p) - // console.error(">> ehw props", me.props) - me.props = p - - me._meta = true -} - -ExtendedHeaderWriter.prototype.end = function () { - // console.error(">> ehw end") - var me = this - - if (me._ended) return - me._ended = true - - me._encodeFields() - - if (me.props.size === 0) { - // nothing to write! - me._ready = true - me._stream.end() - return - } - - me._stream.write(TarHeader.encode(me.props)) - me.body.forEach(function (l) { - me._stream.write(l) - }) - me._ready = true - - // console.error(">> ehw _process calling end()", me.props) - this._stream.end() -} - -ExtendedHeaderWriter.prototype._encodeFields = function () { - // console.error(">> ehw _encodeFields") - this.body = [] - if (this.fields.prefix) { - this.fields.path = this.fields.prefix + "/" + this.fields.path - this.fields.prefix = "" - } - encodeFields(this.fields, "", this.body, this.fields.noProprietary) - var me = this - this.body.forEach(function (l) { - me.props.size += l.length - }) -} - -function encodeFields (fields, prefix, body, nop) { - // console.error(">> >> ehw encodeFields") - // "%d %s=%s\n", , , - // The length is a decimal number, and includes itself and the \n - // Numeric values are decimal strings. - - Object.keys(fields).forEach(function (k) { - var val = fields[k] - , numeric = tar.numeric[k] - - if (prefix) k = prefix + "." + k - - // already including NODETAR.type, don't need File=true also - if (k === fields.type && val === true) return - - switch (k) { - // don't include anything that's always handled just fine - // in the normal header, or only meaningful in the context - // of nodetar - case "mode": - case "cksum": - case "ustar": - case "ustarver": - case "prefix": - case "basename": - case "dirname": - case "needExtended": - case "block": - case "filter": - return - - case "rdev": - if (val === 0) return - break - - case "nlink": - case "dev": // Truly a hero among men, Creator of Star! - case "ino": // Speak his name with reverent awe! 
It is: - k = "SCHILY." + k - break - - default: break - } - - if (val && typeof val === "object" && - !Buffer.isBuffer(val)) encodeFields(val, k, body, nop) - else if (val === null || val === undefined) return - else body.push.apply(body, encodeField(k, val, nop)) - }) - - return body -} - -function encodeField (k, v, nop) { - // lowercase keys must be valid, otherwise prefix with - // "NODETAR." - if (k.charAt(0) === k.charAt(0).toLowerCase()) { - var m = k.split(".")[0] - if (!tar.knownExtended[m]) k = "NODETAR." + k - } - - // no proprietary - if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) { - return [] - } - - if (typeof val === "number") val = val.toString(10) - - var s = new Buffer(" " + k + "=" + v + "\n") - , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1 - - // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) - - // if adding that many digits will make it go over that length, - // then add one to it. For example, if the string is: - // " foo=bar\n" - // then that's 9 characters. With the "9", that bumps the length - // up to 10. However, this is invalid: - // "10 foo=bar\n" - // but, since that's actually 11 characters, since 10 adds another - // character to the length, and the length includes the number - // itself. In that case, just bump it up again. - if (s.length + digits >= Math.pow(10, digits)) digits += 1 - // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) - - var len = digits + s.length - // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len) - var lenBuf = new Buffer("" + len) - if (lenBuf.length + s.length !== len) { - throw new Error("Bad length calculation\n"+ - "len="+len+"\n"+ - "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+ - "lenBuf.length="+lenBuf.length+"\n"+ - "digits="+digits+"\n"+ - "s="+JSON.stringify(s.toString())+"\n"+ - "s.length="+s.length) - } - - return [lenBuf, s] -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/extended-header.js b/node_modules/node-gyp/node_modules/tar/lib/extended-header.js deleted file mode 100644 index 74f432ceee5b2..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/extended-header.js +++ /dev/null @@ -1,140 +0,0 @@ -// An Entry consisting of: -// -// "%d %s=%s\n", , , -// -// The length is a decimal number, and includes itself and the \n -// \0 does not terminate anything. Only the length terminates the string. -// Numeric values are decimal strings. 
- -module.exports = ExtendedHeader - -var Entry = require("./entry.js") - , inherits = require("inherits") - , tar = require("../tar.js") - , numeric = tar.numeric - , keyTrans = { "SCHILY.dev": "dev" - , "SCHILY.ino": "ino" - , "SCHILY.nlink": "nlink" } - -function ExtendedHeader () { - Entry.apply(this, arguments) - this.on("data", this._parse) - this.fields = {} - this._position = 0 - this._fieldPos = 0 - this._state = SIZE - this._sizeBuf = [] - this._keyBuf = [] - this._valBuf = [] - this._size = -1 - this._key = "" -} - -inherits(ExtendedHeader, Entry) -ExtendedHeader.prototype._parse = parse - -var s = 0 - , states = ExtendedHeader.states = {} - , SIZE = states.SIZE = s++ - , KEY = states.KEY = s++ - , VAL = states.VAL = s++ - , ERR = states.ERR = s++ - -Object.keys(states).forEach(function (s) { - states[states[s]] = states[s] -}) - -states[s] = null - -// char code values for comparison -var _0 = "0".charCodeAt(0) - , _9 = "9".charCodeAt(0) - , point = ".".charCodeAt(0) - , a = "a".charCodeAt(0) - , Z = "Z".charCodeAt(0) - , a = "a".charCodeAt(0) - , z = "z".charCodeAt(0) - , space = " ".charCodeAt(0) - , eq = "=".charCodeAt(0) - , cr = "\n".charCodeAt(0) - -function parse (c) { - if (this._state === ERR) return - - for ( var i = 0, l = c.length - ; i < l - ; this._position++, this._fieldPos++, i++) { - // console.error("top of loop, size="+this._size) - - var b = c[i] - - if (this._size >= 0 && this._fieldPos > this._size) { - error(this, "field exceeds length="+this._size) - return - } - - switch (this._state) { - case ERR: return - - case SIZE: - // console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString()) - if (b === space) { - this._state = KEY - // this._fieldPos = this._sizeBuf.length - this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10) - this._sizeBuf.length = 0 - continue - } - if (b < _0 || b > _9) { - error(this, "expected [" + _0 + ".." + _9 + "], got " + b) - return - } - this._sizeBuf.push(b) - continue - - case KEY: - // can be any char except =, not > size. 
- if (b === eq) { - this._state = VAL - this._key = new Buffer(this._keyBuf).toString() - if (keyTrans[this._key]) this._key = keyTrans[this._key] - this._keyBuf.length = 0 - continue - } - this._keyBuf.push(b) - continue - - case VAL: - // field must end with cr - if (this._fieldPos === this._size - 1) { - // console.error("finished with "+this._key) - if (b !== cr) { - error(this, "expected \\n at end of field") - return - } - var val = new Buffer(this._valBuf).toString() - if (numeric[this._key]) { - val = parseFloat(val) - } - this.fields[this._key] = val - - this._valBuf.length = 0 - this._state = SIZE - this._size = -1 - this._fieldPos = -1 - continue - } - this._valBuf.push(b) - continue - } - } -} - -function error (me, msg) { - msg = "invalid header: " + msg - + "\nposition=" + me._position - + "\nfield position=" + me._fieldPos - - me.error(msg) - me.state = ERR -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/extract.js b/node_modules/node-gyp/node_modules/tar/lib/extract.js deleted file mode 100644 index fe1bb976eb0ce..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/extract.js +++ /dev/null @@ -1,94 +0,0 @@ -// give it a tarball and a path, and it'll dump the contents - -module.exports = Extract - -var tar = require("../tar.js") - , fstream = require("fstream") - , inherits = require("inherits") - , path = require("path") - -function Extract (opts) { - if (!(this instanceof Extract)) return new Extract(opts) - tar.Parse.apply(this) - - if (typeof opts !== "object") { - opts = { path: opts } - } - - // better to drop in cwd? seems more standard. - opts.path = opts.path || path.resolve("node-tar-extract") - opts.type = "Directory" - opts.Directory = true - - // similar to --strip or --strip-components - opts.strip = +opts.strip - if (!opts.strip || opts.strip <= 0) opts.strip = 0 - - this._fst = fstream.Writer(opts) - - this.pause() - var me = this - - // Hardlinks in tarballs are relative to the root - // of the tarball. So, they need to be resolved against - // the target directory in order to be created properly. - me.on("entry", function (entry) { - // if there's a "strip" argument, then strip off that many - // path components. 
- if (opts.strip) { - var p = entry.path.split("/").slice(opts.strip).join("/") - entry.path = entry.props.path = p - if (entry.linkpath) { - var lp = entry.linkpath.split("/").slice(opts.strip).join("/") - entry.linkpath = entry.props.linkpath = lp - } - } - if (entry.type === "Link") { - entry.linkpath = entry.props.linkpath = - path.join(opts.path, path.join("/", entry.props.linkpath)) - } - - if (entry.type === "SymbolicLink") { - var dn = path.dirname(entry.path) || "" - var linkpath = entry.props.linkpath - var target = path.resolve(opts.path, dn, linkpath) - if (target.indexOf(opts.path) !== 0) { - linkpath = path.join(opts.path, path.join("/", linkpath)) - } - entry.linkpath = entry.props.linkpath = linkpath - } - }) - - this._fst.on("ready", function () { - me.pipe(me._fst, { end: false }) - me.resume() - }) - - this._fst.on('error', function(err) { - me.emit('error', err) - }) - - this._fst.on('drain', function() { - me.emit('drain') - }) - - // this._fst.on("end", function () { - // console.error("\nEEEE Extract End", me._fst.path) - // }) - - this._fst.on("close", function () { - // console.error("\nEEEE Extract End", me._fst.path) - me.emit("finish") - me.emit("end") - me.emit("close") - }) -} - -inherits(Extract, tar.Parse) - -Extract.prototype._streamEnd = function () { - var me = this - if (!me._ended || me._entry) me.error("unexpected eof") - me._fst.end() - // my .end() is coming later. -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js b/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js deleted file mode 100644 index 0bfc7b80aa7c6..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js +++ /dev/null @@ -1,14 +0,0 @@ -module.exports = GlobalHeaderWriter - -var ExtendedHeaderWriter = require("./extended-header-writer.js") - , inherits = require("inherits") - -inherits(GlobalHeaderWriter, ExtendedHeaderWriter) - -function GlobalHeaderWriter (props) { - if (!(this instanceof GlobalHeaderWriter)) { - return new GlobalHeaderWriter(props) - } - ExtendedHeaderWriter.call(this, props) - this.props.type = "g" -} diff --git a/node_modules/node-gyp/node_modules/tar/lib/header.js b/node_modules/node-gyp/node_modules/tar/lib/header.js deleted file mode 100644 index 05b237c0c7b32..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/header.js +++ /dev/null @@ -1,385 +0,0 @@ -// parse a 512-byte header block to a data object, or vice-versa -// If the data won't fit nicely in a simple header, then generate -// the appropriate extended header file, and return that. - -module.exports = TarHeader - -var tar = require("../tar.js") - , fields = tar.fields - , fieldOffs = tar.fieldOffs - , fieldEnds = tar.fieldEnds - , fieldSize = tar.fieldSize - , numeric = tar.numeric - , assert = require("assert").ok - , space = " ".charCodeAt(0) - , slash = "/".charCodeAt(0) - , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null - -function TarHeader (block) { - if (!(this instanceof TarHeader)) return new TarHeader(block) - if (block) this.decode(block) -} - -TarHeader.prototype = - { decode : decode - , encode: encode - , calcSum: calcSum - , checkSum: checkSum - } - -TarHeader.parseNumeric = parseNumeric -TarHeader.encode = encode -TarHeader.decode = decode - -// note that this will only do the normal ustar header, not any kind -// of extended posix header file. If something doesn't fit comfortably, -// then it will set obj.needExtended = true, and set the block to -// the closest approximation. 
-function encode (obj) { - if (!obj && !(this instanceof TarHeader)) throw new Error( - "encode must be called on a TarHeader, or supplied an object") - - obj = obj || this - var block = obj.block = new Buffer(512) - - // if the object has a "prefix", then that's actually an extension of - // the path field. - if (obj.prefix) { - // console.error("%% header encoding, got a prefix", obj.prefix) - obj.path = obj.prefix + "/" + obj.path - // console.error("%% header encoding, prefixed path", obj.path) - obj.prefix = "" - } - - obj.needExtended = false - - if (obj.mode) { - if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8) - obj.mode = obj.mode & 0777 - } - - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , off = fieldOffs[f] - , end = fieldEnds[f] - , ret - - switch (field) { - case "cksum": - // special, done below, after all the others - break - - case "prefix": - // special, this is an extension of the "path" field. - // console.error("%% header encoding, skip prefix later") - break - - case "type": - // convert from long name to a single char. - var type = obj.type || "0" - if (type.length > 1) { - type = tar.types[obj.type] - if (!type) type = "0" - } - writeText(block, off, end, type) - break - - case "path": - // uses the "prefix" field if > 100 bytes, but <= 255 - var pathLen = Buffer.byteLength(obj.path) - , pathFSize = fieldSize[fields.path] - , prefFSize = fieldSize[fields.prefix] - - // paths between 100 and 255 should use the prefix field. - // longer than 255 - if (pathLen > pathFSize && - pathLen <= pathFSize + prefFSize) { - // need to find a slash somewhere in the middle so that - // path and prefix both fit in their respective fields - var searchStart = pathLen - 1 - pathFSize - , searchEnd = prefFSize - , found = false - , pathBuf = new Buffer(obj.path) - - for ( var s = searchStart - ; (s <= searchEnd) - ; s ++ ) { - if (pathBuf[s] === slash || pathBuf[s] === bslash) { - found = s - break - } - } - - if (found !== false) { - prefix = pathBuf.slice(0, found).toString("utf8") - path = pathBuf.slice(found + 1).toString("utf8") - - ret = writeText(block, off, end, path) - off = fieldOffs[fields.prefix] - end = fieldEnds[fields.prefix] - // console.error("%% header writing prefix", off, end, prefix) - ret = writeText(block, off, end, prefix) || ret - break - } - } - - // paths less than 100 chars don't need a prefix - // and paths longer than 255 need an extended header and will fail - // on old implementations no matter what we do here. - // Null out the prefix, and fallthrough to default. - // console.error("%% header writing no prefix") - var poff = fieldOffs[fields.prefix] - , pend = fieldEnds[fields.prefix] - writeText(block, poff, pend, "") - // fallthrough - - // all other fields are numeric or text - default: - ret = numeric[field] - ? writeNumeric(block, off, end, obj[field]) - : writeText(block, off, end, obj[field] || "") - break - } - obj.needExtended = obj.needExtended || ret - } - - var off = fieldOffs[fields.cksum] - , end = fieldEnds[fields.cksum] - - writeNumeric(block, off, end, calcSum.call(this, block)) - - return block -} - -// if it's a negative number, or greater than will fit, -// then use write256. 
-var MAXNUM = { 12: 077777777777 - , 11: 07777777777 - , 8 : 07777777 - , 7 : 0777777 } -function writeNumeric (block, off, end, num) { - var writeLen = end - off - , maxNum = MAXNUM[writeLen] || 0 - - num = num || 0 - // console.error(" numeric", num) - - if (num instanceof Date || - Object.prototype.toString.call(num) === "[object Date]") { - num = num.getTime() / 1000 - } - - if (num > maxNum || num < 0) { - write256(block, off, end, num) - // need an extended header if negative or too big. - return true - } - - // god, tar is so annoying - // if the string is small enough, you should put a space - // between the octal string and the \0, but if it doesn't - // fit, then don't. - var numStr = Math.floor(num).toString(8) - if (num < MAXNUM[writeLen - 1]) numStr += " " - - // pad with "0" chars - if (numStr.length < writeLen) { - numStr = (new Array(writeLen - numStr.length).join("0")) + numStr - } - - if (numStr.length !== writeLen - 1) { - throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" + - "expected: "+writeLen) - } - block.write(numStr, off, writeLen, "utf8") - block[end - 1] = 0 -} - -function write256 (block, off, end, num) { - var buf = block.slice(off, end) - var positive = num >= 0 - buf[0] = positive ? 0x80 : 0xFF - - // get the number as a base-256 tuple - if (!positive) num *= -1 - var tuple = [] - do { - var n = num % 256 - tuple.push(n) - num = (num - n) / 256 - } while (num) - - var bytes = tuple.length - - var fill = buf.length - bytes - for (var i = 1; i < fill; i ++) { - buf[i] = positive ? 0 : 0xFF - } - - // tuple is a base256 number, with [0] as the *least* significant byte - // if it's negative, then we need to flip all the bits once we hit the - // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's- - // complement is (0xFF - n). 
- var zero = true - for (i = bytes; i > 0; i --) { - var byte = tuple[bytes - i] - if (positive) buf[fill + i] = byte - else if (zero && byte === 0) buf[fill + i] = 0 - else if (zero) { - zero = false - buf[fill + i] = 0x100 - byte - } else buf[fill + i] = 0xFF - byte - } -} - -function writeText (block, off, end, str) { - // strings are written as utf8, then padded with \0 - var strLen = Buffer.byteLength(str) - , writeLen = Math.min(strLen, end - off) - // non-ascii fields need extended headers - // long fields get truncated - , needExtended = strLen !== str.length || strLen > writeLen - - // write the string, and null-pad - if (writeLen > 0) block.write(str, off, writeLen, "utf8") - for (var i = off + writeLen; i < end; i ++) block[i] = 0 - - return needExtended -} - -function calcSum (block) { - block = block || this.block - assert(Buffer.isBuffer(block) && block.length === 512) - - if (!block) throw new Error("Need block to checksum") - - // now figure out what it would be if the cksum was " " - var sum = 0 - , start = fieldOffs[fields.cksum] - , end = fieldEnds[fields.cksum] - - for (var i = 0; i < fieldOffs[fields.cksum]; i ++) { - sum += block[i] - } - - for (var i = start; i < end; i ++) { - sum += space - } - - for (var i = end; i < 512; i ++) { - sum += block[i] - } - - return sum -} - - -function checkSum (block) { - var sum = calcSum.call(this, block) - block = block || this.block - - var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum]) - cksum = parseNumeric(cksum) - - return cksum === sum -} - -function decode (block) { - block = block || this.block - assert(Buffer.isBuffer(block) && block.length === 512) - - this.block = block - this.cksumValid = this.checkSum() - - var prefix = null - - // slice off each field. - for (var f = 0; fields[f] !== null; f ++) { - var field = fields[f] - , val = block.slice(fieldOffs[f], fieldEnds[f]) - - switch (field) { - case "ustar": - // if not ustar, then everything after that is just padding. - if (val.toString() !== "ustar\0") { - this.ustar = false - return - } else { - // console.error("ustar:", val, val.toString()) - this.ustar = val.toString() - } - break - - // prefix is special, since it might signal the xstar header - case "prefix": - var atime = parseNumeric(val.slice(131, 131 + 12)) - , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12)) - if ((val[130] === 0 || val[130] === space) && - typeof atime === "number" && - typeof ctime === "number" && - val[131 + 12] === space && - val[131 + 12 + 12] === space) { - this.atime = atime - this.ctime = ctime - val = val.slice(0, 130) - } - prefix = val.toString("utf8").replace(/\0+$/, "") - // console.error("%% header reading prefix", prefix) - break - - // all other fields are null-padding text - // or a number. - default: - if (numeric[field]) { - this[field] = parseNumeric(val) - } else { - this[field] = val.toString("utf8").replace(/\0+$/, "") - } - break - } - } - - // if we got a prefix, then prepend it to the path. 
- if (prefix) { - this.path = prefix + "/" + this.path - // console.error("%% header got a prefix", this.path) - } -} - -function parse256 (buf) { - // first byte MUST be either 80 or FF - // 80 for positive, FF for 2's comp - var positive - if (buf[0] === 0x80) positive = true - else if (buf[0] === 0xFF) positive = false - else return null - - // build up a base-256 tuple from the least sig to the highest - var zero = false - , tuple = [] - for (var i = buf.length - 1; i > 0; i --) { - var byte = buf[i] - if (positive) tuple.push(byte) - else if (zero && byte === 0) tuple.push(0) - else if (zero) { - zero = false - tuple.push(0x100 - byte) - } else tuple.push(0xFF - byte) - } - - for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) { - sum += tuple[i] * Math.pow(256, i) - } - - return positive ? sum : -1 * sum -} - -function parseNumeric (f) { - if (f[0] & 0x80) return parse256(f) - - var str = f.toString("utf8").split("\0")[0].trim() - , res = parseInt(str, 8) - - return isNaN(res) ? null : res -} - diff --git a/node_modules/node-gyp/node_modules/tar/lib/pack.js b/node_modules/node-gyp/node_modules/tar/lib/pack.js deleted file mode 100644 index 5a3bb95a121bd..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/pack.js +++ /dev/null @@ -1,236 +0,0 @@ -// pipe in an fstream, and it'll make a tarball. -// key-value pair argument is global extended header props. - -module.exports = Pack - -var EntryWriter = require("./entry-writer.js") - , Stream = require("stream").Stream - , path = require("path") - , inherits = require("inherits") - , GlobalHeaderWriter = require("./global-header-writer.js") - , collect = require("fstream").collect - , eof = new Buffer(512) - -for (var i = 0; i < 512; i ++) eof[i] = 0 - -inherits(Pack, Stream) - -function Pack (props) { - // console.error("-- p ctor") - var me = this - if (!(me instanceof Pack)) return new Pack(props) - - if (props) me._noProprietary = props.noProprietary - else me._noProprietary = false - - me._global = props - - me.readable = true - me.writable = true - me._buffer = [] - // console.error("-- -- set current to null in ctor") - me._currentEntry = null - me._processing = false - - me._pipeRoot = null - me.on("pipe", function (src) { - if (src.root === me._pipeRoot) return - me._pipeRoot = src - src.on("end", function () { - me._pipeRoot = null - }) - me.add(src) - }) -} - -Pack.prototype.addGlobal = function (props) { - // console.error("-- p addGlobal") - if (this._didGlobal) return - this._didGlobal = true - - var me = this - GlobalHeaderWriter(props) - .on("data", function (c) { - me.emit("data", c) - }) - .end() -} - -Pack.prototype.add = function (stream) { - if (this._global && !this._didGlobal) this.addGlobal(this._global) - - if (this._ended) return this.emit("error", new Error("add after end")) - - collect(stream) - this._buffer.push(stream) - this._process() - this._needDrain = this._buffer.length > 0 - return !this._needDrain -} - -Pack.prototype.pause = function () { - this._paused = true - if (this._currentEntry) this._currentEntry.pause() - this.emit("pause") -} - -Pack.prototype.resume = function () { - this._paused = false - if (this._currentEntry) this._currentEntry.resume() - this.emit("resume") - this._process() -} - -Pack.prototype.end = function () { - this._ended = true - this._buffer.push(eof) - this._process() -} - -Pack.prototype._process = function () { - var me = this - if (me._paused || me._processing) { - return - } - - var entry = me._buffer.shift() - - if (!entry) { - if (me._needDrain) { - 
me.emit("drain") - } - return - } - - if (entry.ready === false) { - // console.error("-- entry is not ready", entry) - me._buffer.unshift(entry) - entry.on("ready", function () { - // console.error("-- -- ready!", entry) - me._process() - }) - return - } - - me._processing = true - - if (entry === eof) { - // need 2 ending null blocks. - me.emit("data", eof) - me.emit("data", eof) - me.emit("end") - me.emit("close") - return - } - - // Change the path to be relative to the root dir that was - // added to the tarball. - // - // XXX This should be more like how -C works, so you can - // explicitly set a root dir, and also explicitly set a pathname - // in the tarball to use. That way we can skip a lot of extra - // work when resolving symlinks for bundled dependencies in npm. - - var root = path.dirname((entry.root || entry).path); - if (me._global && me._global.fromBase && entry.root && entry.root.path) { - // user set 'fromBase: true' indicating tar root should be directory itself - root = entry.root.path; - } - - var wprops = {} - - Object.keys(entry.props || {}).forEach(function (k) { - wprops[k] = entry.props[k] - }) - - if (me._noProprietary) wprops.noProprietary = true - - wprops.path = path.relative(root, entry.path || '') - - // actually not a matter of opinion or taste. - if (process.platform === "win32") { - wprops.path = wprops.path.replace(/\\/g, "/") - } - - if (!wprops.type) - wprops.type = 'Directory' - - switch (wprops.type) { - // sockets not supported - case "Socket": - return - - case "Directory": - wprops.path += "/" - wprops.size = 0 - break - - case "Link": - var lp = path.resolve(path.dirname(entry.path), entry.linkpath) - wprops.linkpath = path.relative(root, lp) || "." - wprops.size = 0 - break - - case "SymbolicLink": - var lp = path.resolve(path.dirname(entry.path), entry.linkpath) - wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "." - wprops.size = 0 - break - } - - // console.error("-- new writer", wprops) - // if (!wprops.type) { - // // console.error("-- no type?", entry.constructor.name, entry) - // } - - // console.error("-- -- set current to new writer", wprops.path) - var writer = me._currentEntry = EntryWriter(wprops) - - writer.parent = me - - // writer.on("end", function () { - // // console.error("-- -- writer end", writer.path) - // }) - - writer.on("data", function (c) { - me.emit("data", c) - }) - - writer.on("header", function () { - Buffer.prototype.toJSON = function () { - return this.toString().split(/\0/).join(".") - } - // console.error("-- -- writer header %j", writer.props) - if (writer.props.size === 0) nextEntry() - }) - writer.on("close", nextEntry) - - var ended = false - function nextEntry () { - if (ended) return - ended = true - - // console.error("-- -- writer close", writer.path) - // console.error("-- -- set current to null", wprops.path) - me._currentEntry = null - me._processing = false - me._process() - } - - writer.on("error", function (er) { - // console.error("-- -- writer error", writer.path) - me.emit("error", er) - }) - - // if it's the root, then there's no need to add its entries, - // or data, since they'll be added directly. 
- if (entry === me._pipeRoot) { - // console.error("-- is the root, don't auto-add") - writer.add = null - } - - entry.pipe(writer) -} - -Pack.prototype.destroy = function () {} -Pack.prototype.write = function () {} diff --git a/node_modules/node-gyp/node_modules/tar/lib/parse.js b/node_modules/node-gyp/node_modules/tar/lib/parse.js deleted file mode 100644 index 1c66ebdaf0c15..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/lib/parse.js +++ /dev/null @@ -1,281 +0,0 @@ - -// A writable stream. -// It emits "entry" events, which provide a readable stream that has -// header info attached. - -module.exports = Parse.create = Parse - -var stream = require("stream") - , Stream = stream.Stream - , BlockStream = require("block-stream") - , tar = require("../tar.js") - , TarHeader = require("./header.js") - , Entry = require("./entry.js") - , BufferEntry = require("./buffer-entry.js") - , ExtendedHeader = require("./extended-header.js") - , assert = require("assert").ok - , inherits = require("inherits") - , fstream = require("fstream") - -// reading a tar is a lot like reading a directory -// However, we're actually not going to run the ctor, -// since it does a stat and various other stuff. -// This inheritance gives us the pause/resume/pipe -// behavior that is desired. -inherits(Parse, fstream.Reader) - -function Parse () { - var me = this - if (!(me instanceof Parse)) return new Parse() - - // doesn't apply fstream.Reader ctor? - // no, becasue we don't want to stat/etc, we just - // want to get the entry/add logic from .pipe() - Stream.apply(me) - - me.writable = true - me.readable = true - me._stream = new BlockStream(512) - me.position = 0 - me._ended = false - me._hardLinks = {} - - me._stream.on("error", function (e) { - me.emit("error", e) - }) - - me._stream.on("data", function (c) { - me._process(c) - }) - - me._stream.on("end", function () { - me._streamEnd() - }) - - me._stream.on("drain", function () { - me.emit("drain") - }) -} - -// overridden in Extract class, since it needs to -// wait for its DirWriter part to finish before -// emitting "end" -Parse.prototype._streamEnd = function () { - var me = this - if (!me._ended || me._entry) me.error("unexpected eof") - me.emit("end") -} - -// a tar reader is actually a filter, not just a readable stream. -// So, you should pipe a tarball stream into it, and it needs these -// write/end methods to do that. -Parse.prototype.write = function (c) { - if (this._ended) { - // gnutar puts a LOT of nulls at the end. - // you can keep writing these things forever. - // Just ignore them. - for (var i = 0, l = c.length; i > l; i ++) { - if (c[i] !== 0) return this.error("write() after end()") - } - return - } - return this._stream.write(c) -} - -Parse.prototype.end = function (c) { - this._ended = true - return this._stream.end(c) -} - -// don't need to do anything, since we're just -// proxying the data up from the _stream. -// Just need to override the parent's "Not Implemented" -// error-thrower. -Parse.prototype._read = function () {} - -Parse.prototype._process = function (c) { - assert(c && c.length === 512, "block size should be 512") - - // one of three cases. - // 1. A new header - // 2. A part of a file/extended header - // 3. 
One of two or more EOF null blocks - - if (this._entry) { - var entry = this._entry - if(!entry._abort) entry.write(c) - else { - entry._remaining -= c.length - if(entry._remaining < 0) entry._remaining = 0 - } - if (entry._remaining === 0) { - entry.end() - this._entry = null - } - } else { - // either zeroes or a header - var zero = true - for (var i = 0; i < 512 && zero; i ++) { - zero = c[i] === 0 - } - - // eof is *at least* 2 blocks of nulls, and then the end of the - // file. you can put blocks of nulls between entries anywhere, - // so appending one tarball to another is technically valid. - // ending without the eof null blocks is not allowed, however. - if (zero) { - if (this._eofStarted) - this._ended = true - this._eofStarted = true - } else { - this._eofStarted = false - this._startEntry(c) - } - } - - this.position += 512 -} - -// take a header chunk, start the right kind of entry. -Parse.prototype._startEntry = function (c) { - var header = new TarHeader(c) - , self = this - , entry - , ev - , EntryType - , onend - , meta = false - - if (null === header.size || !header.cksumValid) { - var e = new Error("invalid tar file") - e.header = header - e.tar_file_offset = this.position - e.tar_block = this.position / 512 - return this.emit("error", e) - } - - switch (tar.types[header.type]) { - case "File": - case "OldFile": - case "Link": - case "SymbolicLink": - case "CharacterDevice": - case "BlockDevice": - case "Directory": - case "FIFO": - case "ContiguousFile": - case "GNUDumpDir": - // start a file. - // pass in any extended headers - // These ones consumers are typically most interested in. - EntryType = Entry - ev = "entry" - break - - case "GlobalExtendedHeader": - // extended headers that apply to the rest of the tarball - EntryType = ExtendedHeader - onend = function () { - self._global = self._global || {} - Object.keys(entry.fields).forEach(function (k) { - self._global[k] = entry.fields[k] - }) - } - ev = "globalExtendedHeader" - meta = true - break - - case "ExtendedHeader": - case "OldExtendedHeader": - // extended headers that apply to the next entry - EntryType = ExtendedHeader - onend = function () { - self._extended = entry.fields - } - ev = "extendedHeader" - meta = true - break - - case "NextFileHasLongLinkpath": - // set linkpath= in extended header - EntryType = BufferEntry - onend = function () { - self._extended = self._extended || {} - self._extended.linkpath = entry.body - } - ev = "longLinkpath" - meta = true - break - - case "NextFileHasLongPath": - case "OldGnuLongPath": - // set path= in file-extended header - EntryType = BufferEntry - onend = function () { - self._extended = self._extended || {} - self._extended.path = entry.body - } - ev = "longPath" - meta = true - break - - default: - // all the rest we skip, but still set the _entry - // member, so that we can skip over their data appropriately. - // emit an event to say that this is an ignored entry type? - EntryType = Entry - ev = "ignoredEntry" - break - } - - var global, extended - if (meta) { - global = extended = null - } else { - var global = this._global - var extended = this._extended - - // extendedHeader only applies to one entry, so once we start - // an entry, it's over. - this._extended = null - } - entry = new EntryType(header, extended, global) - entry.meta = meta - - // only proxy data events of normal files. 
- if (!meta) { - entry.on("data", function (c) { - me.emit("data", c) - }) - } - - if (onend) entry.on("end", onend) - - this._entry = entry - - if (entry.type === "Link") { - this._hardLinks[entry.path] = entry - } - - var me = this - - entry.on("pause", function () { - me.pause() - }) - - entry.on("resume", function () { - me.resume() - }) - - if (this.listeners("*").length) { - this.emit("*", ev, entry) - } - - this.emit(ev, entry) - - // Zero-byte entry. End immediately. - if (entry.props.size === 0) { - entry.end() - this._entry = null - } -} diff --git a/node_modules/node-gyp/node_modules/tar/package.json b/node_modules/node-gyp/node_modules/tar/package.json deleted file mode 100644 index 6e7fb2ed71093..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/package.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "_from": "tar@^2.0.0", - "_id": "tar@2.2.2", - "_inBundle": false, - "_integrity": "sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA==", - "_location": "/node-gyp/tar", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "tar@^2.0.0", - "name": "tar", - "escapedName": "tar", - "rawSpec": "^2.0.0", - "saveSpec": null, - "fetchSpec": "^2.0.0" - }, - "_requiredBy": [ - "/node-gyp" - ], - "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.2.tgz", - "_shasum": "0ca8848562c7299b8b446ff6a4d60cdbb23edc40", - "_spec": "tar@^2.0.0", - "_where": "/Users/isaacs/dev/npm/cli/node_modules/node-gyp", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/node-tar/issues" - }, - "bundleDependencies": false, - "dependencies": { - "block-stream": "*", - "fstream": "^1.0.12", - "inherits": "2" - }, - "deprecated": false, - "description": "tar for node", - "devDependencies": { - "graceful-fs": "^4.1.2", - "mkdirp": "^0.5.0", - "rimraf": "1.x", - "tap": "0.x" - }, - "homepage": "https://github.com/isaacs/node-tar#readme", - "license": "ISC", - "main": "tar.js", - "name": "tar", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-tar.git" - }, - "scripts": { - "test": "tap test/*.js" - }, - "version": "2.2.2" -} diff --git a/node_modules/node-gyp/node_modules/tar/tar.js b/node_modules/node-gyp/node_modules/tar/tar.js deleted file mode 100644 index a81298b9a0b12..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/tar.js +++ /dev/null @@ -1,173 +0,0 @@ -// field paths that every tar file must have. -// header is padded to 512 bytes. -var f = 0 - , fields = {} - , path = fields.path = f++ - , mode = fields.mode = f++ - , uid = fields.uid = f++ - , gid = fields.gid = f++ - , size = fields.size = f++ - , mtime = fields.mtime = f++ - , cksum = fields.cksum = f++ - , type = fields.type = f++ - , linkpath = fields.linkpath = f++ - , headerSize = 512 - , blockSize = 512 - , fieldSize = [] - -fieldSize[path] = 100 -fieldSize[mode] = 8 -fieldSize[uid] = 8 -fieldSize[gid] = 8 -fieldSize[size] = 12 -fieldSize[mtime] = 12 -fieldSize[cksum] = 8 -fieldSize[type] = 1 -fieldSize[linkpath] = 100 - -// "ustar\0" may introduce another bunch of headers. -// these are optional, and will be nulled out if not present. - -var ustar = fields.ustar = f++ - , ustarver = fields.ustarver = f++ - , uname = fields.uname = f++ - , gname = fields.gname = f++ - , devmaj = fields.devmaj = f++ - , devmin = fields.devmin = f++ - , prefix = fields.prefix = f++ - , fill = fields.fill = f++ - -// terminate fields. 
-fields[f] = null - -fieldSize[ustar] = 6 -fieldSize[ustarver] = 2 -fieldSize[uname] = 32 -fieldSize[gname] = 32 -fieldSize[devmaj] = 8 -fieldSize[devmin] = 8 -fieldSize[prefix] = 155 -fieldSize[fill] = 12 - -// nb: prefix field may in fact be 130 bytes of prefix, -// a null char, 12 bytes for atime, 12 bytes for ctime. -// -// To recognize this format: -// 1. prefix[130] === ' ' or '\0' -// 2. atime and ctime are octal numeric values -// 3. atime and ctime have ' ' in their last byte - -var fieldEnds = {} - , fieldOffs = {} - , fe = 0 -for (var i = 0; i < f; i ++) { - fieldOffs[i] = fe - fieldEnds[i] = (fe += fieldSize[i]) -} - -// build a translation table of field paths. -Object.keys(fields).forEach(function (f) { - if (fields[f] !== null) fields[fields[f]] = f -}) - -// different values of the 'type' field -// paths match the values of Stats.isX() functions, where appropriate -var types = - { 0: "File" - , "\0": "OldFile" // like 0 - , "": "OldFile" - , 1: "Link" - , 2: "SymbolicLink" - , 3: "CharacterDevice" - , 4: "BlockDevice" - , 5: "Directory" - , 6: "FIFO" - , 7: "ContiguousFile" // like 0 - // posix headers - , g: "GlobalExtendedHeader" // k=v for the rest of the archive - , x: "ExtendedHeader" // k=v for the next file - // vendor-specific stuff - , A: "SolarisACL" // skip - , D: "GNUDumpDir" // like 5, but with data, which should be skipped - , I: "Inode" // metadata only, skip - , K: "NextFileHasLongLinkpath" // data = link path of next file - , L: "NextFileHasLongPath" // data = path of next file - , M: "ContinuationFile" // skip - , N: "OldGnuLongPath" // like L - , S: "SparseFile" // skip - , V: "TapeVolumeHeader" // skip - , X: "OldExtendedHeader" // like x - } - -Object.keys(types).forEach(function (t) { - types[types[t]] = types[types[t]] || t -}) - -// values for the mode field -var modes = - { suid: 04000 // set uid on extraction - , sgid: 02000 // set gid on extraction - , svtx: 01000 // set restricted deletion flag on dirs on extraction - , uread: 0400 - , uwrite: 0200 - , uexec: 0100 - , gread: 040 - , gwrite: 020 - , gexec: 010 - , oread: 4 - , owrite: 2 - , oexec: 1 - , all: 07777 - } - -var numeric = - { mode: true - , uid: true - , gid: true - , size: true - , mtime: true - , devmaj: true - , devmin: true - , cksum: true - , atime: true - , ctime: true - , dev: true - , ino: true - , nlink: true - } - -Object.keys(modes).forEach(function (t) { - modes[modes[t]] = modes[modes[t]] || t -}) - -var knownExtended = - { atime: true - , charset: true - , comment: true - , ctime: true - , gid: true - , gname: true - , linkpath: true - , mtime: true - , path: true - , realtime: true - , security: true - , size: true - , uid: true - , uname: true } - - -exports.fields = fields -exports.fieldSize = fieldSize -exports.fieldOffs = fieldOffs -exports.fieldEnds = fieldEnds -exports.types = types -exports.modes = modes -exports.numeric = numeric -exports.headerSize = headerSize -exports.blockSize = blockSize -exports.knownExtended = knownExtended - -exports.Pack = require("./lib/pack.js") -exports.Parse = require("./lib/parse.js") -exports.Extract = require("./lib/extract.js") diff --git a/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js b/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js deleted file mode 100644 index 1524ff7af0570..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js +++ /dev/null @@ -1,53 +0,0 @@ -// the fixtures have some weird stuff that is painful -// to include directly in the repo for 
various reasons. -// -// So, unpack the fixtures with the system tar first. -// -// This means, of course, that it'll only work if you -// already have a tar implementation, and some of them -// will not properly unpack the fixtures anyway. -// -// But, since usually those tests will fail on Windows -// and other systems with less capable filesystems anyway, -// at least this way we don't cause inconveniences by -// merely cloning the repo or installing the package. - -var tap = require("tap") -, child_process = require("child_process") -, rimraf = require("rimraf") -, test = tap.test -, path = require("path") - -test("clean fixtures", function (t) { - rimraf(path.resolve(__dirname, "fixtures"), function (er) { - t.ifError(er, "rimraf ./fixtures/") - t.end() - }) -}) - -test("clean tmp", function (t) { - rimraf(path.resolve(__dirname, "tmp"), function (er) { - t.ifError(er, "rimraf ./tmp/") - t.end() - }) -}) - -test("extract fixtures", function (t) { - var c = child_process.spawn("tar" - ,["xzvf", "fixtures.tgz"] - ,{ cwd: __dirname }) - - c.stdout.on("data", errwrite) - c.stderr.on("data", errwrite) - function errwrite (chunk) { - process.stderr.write(chunk) - } - - c.on("exit", function (code) { - t.equal(code, 0, "extract fixtures should exit with 0") - if (code) { - t.comment("Note, all tests from here on out will fail because of this.") - } - t.end() - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz b/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz deleted file mode 100644 index 9e7014d85abe4..0000000000000 Binary files a/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz and /dev/null differ diff --git a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js b/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js deleted file mode 100644 index 9719c42f35400..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js +++ /dev/null @@ -1,177 +0,0 @@ -// Set the umask, so that it works the same everywhere. 
-process.umask(parseInt('22', 8)) - -var fs = require('fs') -var path = require('path') - -var fstream = require('fstream') -var test = require('tap').test - -var tar = require('../tar.js') -var file = path.resolve(__dirname, 'dir-normalization.tar') -var target = path.resolve(__dirname, 'tmp/dir-normalization-test') -var ee = 0 - -var expectEntries = [ - { path: 'fixtures/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/a/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/the-chumbler', - mode: '755', - type: '2', - linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'), - }, - { path: 'fixtures/a/b/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/a/x', - mode: '644', - type: '0', - linkpath: '' - }, - { path: 'fixtures/a/b/c/', - mode: '755', - type: '5', - linkpath: '' - }, - { path: 'fixtures/a/b/c/y', - mode: '755', - type: '2', - linkpath: '../../x', - } -] - -var ef = 0 -var expectFiles = [ - { path: '', - mode: '40755', - type: 'Directory', - depth: 0, - linkpath: undefined - }, - { path: '/fixtures', - mode: '40755', - type: 'Directory', - depth: 1, - linkpath: undefined - }, - { path: '/fixtures/a', - mode: '40755', - type: 'Directory', - depth: 2, - linkpath: undefined - }, - { path: '/fixtures/a/b', - mode: '40755', - type: 'Directory', - depth: 3, - linkpath: undefined - }, - { path: '/fixtures/a/b/c', - mode: '40755', - type: 'Directory', - depth: 4, - linkpath: undefined - }, - { path: '/fixtures/a/b/c/y', - mode: '120755', - type: 'SymbolicLink', - depth: 5, - linkpath: '../../x' - }, - { path: '/fixtures/a/x', - mode: '100644', - type: 'File', - depth: 3, - linkpath: undefined - }, - { path: '/fixtures/the-chumbler', - mode: '120755', - type: 'SymbolicLink', - depth: 2, - linkpath: path.resolve(target, 'a/b/c/d/the-chumbler') - } -] - -test('preclean', function (t) { - require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test')) - t.pass('cleaned!') - t.end() -}) - -test('extract test', function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - inp.pipe(extract) - - extract.on('end', function () { - t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries') - - // should get no more entries after end - extract.removeAllListeners('entry') - extract.on('entry', function (e) { - t.fail('Should not get entries after end!') - }) - - next() - }) - - extract.on('entry', function (entry) { - var mode = entry.props.mode & (~parseInt('22', 8)) - var found = { - path: entry.path, - mode: mode.toString(8), - type: entry.props.type, - linkpath: entry.props.linkpath, - } - - var wanted = expectEntries[ee++] - t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path)) - }) - - function next () { - var r = fstream.Reader({ - path: target, - type: 'Directory', - sort: 'alpha' - }) - - r.on('ready', function () { - foundEntry(r) - }) - - r.on('end', finish) - - function foundEntry (entry) { - var p = entry.path.substr(target.length) - var mode = entry.props.mode & (~parseInt('22', 8)) - var found = { - path: p, - mode: mode.toString(8), - type: entry.props.type, - depth: entry.props.depth, - linkpath: entry.props.linkpath - } - - var wanted = expectFiles[ef++] - t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path)) - - entry.on('entry', foundEntry) - } - - function finish () { - t.equal(ef, expectFiles.length, 'should have ' + ef + ' items') - t.end() - } - } -}) diff --git 
a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar b/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar deleted file mode 100644 index 3c4845356cecb..0000000000000 Binary files a/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar and /dev/null differ diff --git a/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js b/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js deleted file mode 100644 index e484920fd9625..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js +++ /dev/null @@ -1,33 +0,0 @@ -var fs = require('fs') -var path = require('path') -var zlib = require('zlib') - -var tap = require('tap') - -var tar = require('../tar.js') - -var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz') -var target = path.join(__dirname, 'tmp/extract-test') - -tap.test('preclean', function (t) { - require('rimraf').sync(__dirname + '/tmp/extract-test') - t.pass('cleaned!') - t.end() -}) - -tap.test('extract test', function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - inp.pipe(zlib.createGunzip()).pipe(extract) - - extract.on('error', function (er) { - t.equal(er.message, 'unexpected eof', 'error noticed') - t.end() - }) - - extract.on('end', function () { - t.fail('shouldn\'t reach this point due to errors') - t.end() - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/extract-move.js b/node_modules/node-gyp/node_modules/tar/test/extract-move.js deleted file mode 100644 index 45400cd9bb818..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/extract-move.js +++ /dev/null @@ -1,132 +0,0 @@ -// Set the umask, so that it works the same everywhere. -process.umask(parseInt('22', 8)) - -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , gfs = require("graceful-fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/dir.tar") - , target = path.resolve(__dirname, "tmp/extract-test") - , index = 0 - , fstream = require("fstream") - , rimraf = require("rimraf") - , mkdirp = require("mkdirp") - - , ee = 0 - , expectEntries = [ - { - "path" : "dir/", - "mode" : "750", - "type" : "5", - "depth" : undefined, - "size" : 0, - "linkpath" : "", - "nlink" : undefined, - "dev" : undefined, - "ino" : undefined - }, - { - "path" : "dir/sub/", - "mode" : "750", - "type" : "5", - "depth" : undefined, - "size" : 0, - "linkpath" : "", - "nlink" : undefined, - "dev" : undefined, - "ino" : undefined - } ] - -function slow (fs, method, t1, t2) { - var orig = fs[method] - if (!orig) return null - fs[method] = function () { - var args = [].slice.call(arguments) - console.error("slow", method, args[0]) - var cb = args.pop() - - setTimeout(function () { - orig.apply(fs, args.concat(function(er, data) { - setTimeout(function() { - cb(er, data) - }, t2) - })) - }, t1) - } -} - -// Make sure we get the graceful-fs that fstream is using. -var gfs2 -try { - gfs2 = require("fstream/node_modules/graceful-fs") -} catch (er) {} - -var slowMethods = ["chown", "chmod", "utimes", "lutimes"] -slowMethods.forEach(function (method) { - var t1 = 500 - var t2 = 0 - slow(fs, method, t1, t2) - slow(gfs, method, t1, t2) - if (gfs2) { - slow(gfs2, method, t1, t2) - } -}) - - - -// The extract class basically just pipes the input -// to a Reader, and then to a fstream.DirWriter - -// So, this is as much a test of fstream.Reader and fstream.Writer -// as it is of tar.Extract, but it sort of makes sense. 
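The slow() wrapper above is the interesting part of this deleted test: it injects artificial latency into fs methods so that ordering bugs between chown/chmod/utimes calls and stream events have room to surface. The same trick in isolation (the delays and the method chosen here are arbitrary):

// Wrap an async fs method so both the call and its callback are
// delayed, widening any race windows in the code under test.
var fs = require('fs')

function slow (obj, method, preMs, postMs) {
  var orig = obj[method]
  if (!orig) return
  obj[method] = function () {
    var args = [].slice.call(arguments)
    var cb = args.pop()
    setTimeout(function () {
      orig.apply(obj, args.concat(function () {
        var res = arguments
        setTimeout(function () { cb.apply(null, res) }, postMs)
      }))
    }, preMs)
  }
}

slow(fs, 'chmod', 500, 0) // every fs.chmod now takes at least 500ms extra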
- -tap.test("preclean", function (t) { - rimraf.sync(target) - /mkdirp.sync(target) - t.pass("cleaned!") - t.end() -}) - -tap.test("extract test", function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - // give it a weird buffer size to try to break in odd places - inp.bufferSize = 1234 - - inp.pipe(extract) - - extract.on("end", function () { - rimraf.sync(target) - - t.equal(ee, expectEntries.length, "should see "+ee+" entries") - - // should get no more entries after end - extract.removeAllListeners("entry") - extract.on("entry", function (e) { - t.fail("Should not get entries after end!") - }) - - t.end() - }) - - - extract.on("entry", function (entry) { - var found = - { path: entry.path - , mode: entry.props.mode.toString(8) - , type: entry.props.type - , depth: entry.props.depth - , size: entry.props.size - , linkpath: entry.props.linkpath - , nlink: entry.props.nlink - , dev: entry.props.dev - , ino: entry.props.ino - } - - var wanted = expectEntries[ee ++] - - t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/extract.js b/node_modules/node-gyp/node_modules/tar/test/extract.js deleted file mode 100644 index eca4e7cc962db..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/extract.js +++ /dev/null @@ -1,367 +0,0 @@ -// Set the umask, so that it works the same everywhere. -process.umask(parseInt('22', 8)) - -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/c.tar") - , target = path.resolve(__dirname, "tmp/extract-test") - , index = 0 - , fstream = require("fstream") - - , ee = 0 - , expectEntries = -[ { path: 'c.txt', - mode: '644', - type: '0', - depth: undefined, - size: 513, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'cc.txt', - mode: '644', - type: '0', - depth: undefined, - size: 513, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '0', - depth: undefined, - size: 100, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'Ω.txt', - mode: '644', - type: '0', - depth: undefined, - size: 2, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 'Ω.txt', - mode: '644', - type: '0', - depth: undefined, - size: 2, - linkpath: '', - nlink: 1, - dev: 234881026, - ino: 51693379 }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '0', - depth: undefined, - size: 200, - linkpath: '', - nlink: 1, - dev: 234881026, - ino: 51681874 }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '0', - depth: undefined, - size: 201, - linkpath: '', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: 
'200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', - mode: '777', - type: '2', - depth: undefined, - size: 0, - linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - nlink: undefined, - dev: undefined, - ino: undefined }, - { path: '200-hard', - mode: '644', - type: '0', - depth: undefined, - size: 200, - linkpath: '', - nlink: 2, - dev: 234881026, - ino: 51681874 }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '644', - type: '1', - depth: undefined, - size: 0, - linkpath: path.resolve(target, '200-hard'), - nlink: 2, - dev: 234881026, - ino: 51681874 } ] - - , ef = 0 - , expectFiles = -[ { path: '', - mode: '40755', - type: 'Directory', - depth: 0, - linkpath: undefined }, - { path: '/200-hard', - mode: '100644', - type: 'File', - depth: 1, - size: 200, - linkpath: undefined, - nlink: 2 }, - { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', - mode: '120777', - type: 'SymbolicLink', - depth: 1, - size: 200, - linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - nlink: 1 }, - { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '100644', - type: 'Link', - depth: 1, - size: 200, - linkpath: path.join(target, '200-hard'), - nlink: 2 }, - { path: '/c.txt', - mode: '100644', - type: 'File', - depth: 1, - size: 513, - linkpath: undefined, - nlink: 1 }, - { path: '/cc.txt', - mode: '100644', - type: 'File', - depth: 1, - size: 513, - linkpath: undefined, - nlink: 1 }, - { path: '/r', - mode: '40755', - type: 'Directory', - depth: 1, - linkpath: undefined }, - { path: '/r/e', - mode: '40755', - type: 'Directory', - depth: 2, - linkpath: undefined }, - { path: '/r/e/a', - mode: '40755', - type: 'Directory', - depth: 3, - linkpath: undefined }, - { path: '/r/e/a/l', - mode: '40755', - type: 'Directory', - depth: 4, - linkpath: undefined }, - { path: '/r/e/a/l/l', - mode: '40755', - type: 'Directory', - depth: 5, - linkpath: undefined }, - { path: '/r/e/a/l/l/y', - mode: '40755', - type: 'Directory', - depth: 6, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-', - mode: '40755', - type: 'Directory', - depth: 7, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d', - mode: '40755', - type: 'Directory', - depth: 8, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e', - mode: '40755', - type: 'Directory', - depth: 9, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e', - mode: '40755', - type: 'Directory', - depth: 10, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p', - mode: '40755', - type: 'Directory', - depth: 11, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-', - mode: '40755', - type: 
'Directory', - depth: 12, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f', - mode: '40755', - type: 'Directory', - depth: 13, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o', - mode: '40755', - type: 'Directory', - depth: 14, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l', - mode: '40755', - type: 'Directory', - depth: 15, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d', - mode: '40755', - type: 'Directory', - depth: 16, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e', - mode: '40755', - type: 'Directory', - depth: 17, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r', - mode: '40755', - type: 'Directory', - depth: 18, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-', - mode: '40755', - type: 'Directory', - depth: 19, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p', - mode: '40755', - type: 'Directory', - depth: 20, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a', - mode: '40755', - type: 'Directory', - depth: 21, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', - mode: '40755', - type: 'Directory', - depth: 22, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h', - mode: '40755', - type: 'Directory', - depth: 23, - linkpath: undefined }, - { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: '100644', - type: 'File', - depth: 24, - size: 100, - linkpath: undefined, - nlink: 1 }, - { path: '/Ω.txt', - mode: '100644', - type: 'File', - depth: 1, - size: 2, - linkpath: undefined, - nlink: 1 } ] - - - -// The extract class basically just pipes the input -// to a Reader, and then to a fstream.DirWriter - -// So, this is as much a test of fstream.Reader and fstream.Writer -// as it is of tar.Extract, but it sort of makes sense. 
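All of these extract tests share one skeleton: pipe a tarball into tar.Extract, record what the 'entry' events report, and compare that against a fixed expectation table at 'end'. Reduced to its core, with placeholder file names and a hypothetical two-item expectation list (`tar` here is the old node-tar 1.x API these vendored tests exercise):

var fs = require('fs')
var tar = require('tar') // node-tar 1.x style, as used above

var expected = ['c.txt', 'cc.txt'] // hypothetical expectation list
var seen = []

fs.createReadStream('fixture.tar')
  .pipe(tar.Extract('/tmp/extract-demo'))
  .on('entry', function (entry) { seen.push(entry.path) })
  .on('end', function () {
    console.log(seen.length === expected.length ? 'ok' : 'mismatch')
  })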
- -tap.test("preclean", function (t) { - require("rimraf").sync(__dirname + "/tmp/extract-test") - t.pass("cleaned!") - t.end() -}) - -tap.test("extract test", function (t) { - var extract = tar.Extract(target) - var inp = fs.createReadStream(file) - - // give it a weird buffer size to try to break in odd places - inp.bufferSize = 1234 - - inp.pipe(extract) - - extract.on("end", function () { - t.equal(ee, expectEntries.length, "should see "+ee+" entries") - - // should get no more entries after end - extract.removeAllListeners("entry") - extract.on("entry", function (e) { - t.fail("Should not get entries after end!") - }) - - next() - }) - - extract.on("entry", function (entry) { - var found = - { path: entry.path - , mode: entry.props.mode.toString(8) - , type: entry.props.type - , depth: entry.props.depth - , size: entry.props.size - , linkpath: entry.props.linkpath - , nlink: entry.props.nlink - , dev: entry.props.dev - , ino: entry.props.ino - } - - var wanted = expectEntries[ee ++] - - t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) - }) - - function next () { - var r = fstream.Reader({ path: target - , type: "Directory" - // this is just to encourage consistency - , sort: "alpha" }) - - r.on("ready", function () { - foundEntry(r) - }) - - r.on("end", finish) - - function foundEntry (entry) { - var p = entry.path.substr(target.length) - var found = - { path: p - , mode: entry.props.mode.toString(8) - , type: entry.props.type - , depth: entry.props.depth - , size: entry.props.size - , linkpath: entry.props.linkpath - , nlink: entry.props.nlink - } - - var wanted = expectFiles[ef ++] - - t.has(found, wanted, "unpacked file " + ef + " " + wanted.path) - - entry.on("entry", foundEntry) - } - - function finish () { - t.equal(ef, expectFiles.length, "should have "+ef+" items") - t.end() - } - } -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz b/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz deleted file mode 100644 index f1676023afa2b..0000000000000 Binary files a/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz and /dev/null differ diff --git a/node_modules/node-gyp/node_modules/tar/test/header.js b/node_modules/node-gyp/node_modules/tar/test/header.js deleted file mode 100644 index 8ea6f79500de7..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/header.js +++ /dev/null @@ -1,183 +0,0 @@ -var tap = require("tap") -var TarHeader = require("../lib/header.js") -var tar = require("../tar.js") -var fs = require("fs") - - -var headers = - { "a.txt file header": - [ 
"612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'a.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 257 - , mtime: 1319493851 - , cksum: 5417 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - - , "omega pax": // the extended header from omega tar. - [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'PaxHeader/Ω.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 120 - , mtime: 1301254537 - , cksum: 6697 - , type: 'x' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } ] - - , "omega file header": - [ 
"cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'Ω.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 2 - , mtime: 1301254537 - , cksum: 5690 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } ] - - , "foo.js file header": - [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'foo.js' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 4 - , mtime: 1301246433 - , cksum: 5519 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - - , "b.txt file header": - [ 
"622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true - , path: 'b.txt' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 512 - , mtime: 1319494079 - , cksum: 5425 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - - , "deep nested file": - [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - , { cksumValid: true, - path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' - , mode: 420 - , uid: 24561 - , gid: 20 - , size: 100 - , mtime: 1319687003 - , cksum: 18124 - , type: '0' - , linkpath: '' - , ustar: 'ustar\0' - , ustarver: '00' - , uname: 'isaacs' - , gname: 'staff' - , devmaj: 0 - , devmin: 0 - , fill: '' } - ] - } - -tap.test("parsing", function (t) { - Object.keys(headers).forEach(function (name) { - var h = headers[name] - , header = new Buffer(h[0], "hex") - , expect = h[1] - , parsed = new TarHeader(header) - - // console.error(parsed) - t.has(parsed, expect, "parse " + name) - }) - t.end() -}) - -tap.test("encoding", function (t) { - Object.keys(headers).forEach(function (name) { - var h = headers[name] - , expect = new Buffer(h[0], "hex") - , encoded = TarHeader.encode(h[1]) - - // might have slightly different bytes, since the standard - // isn't very strict, but should have the same semantics - // checkSum will be different, but cksumValid will be true - - var th = new TarHeader(encoded) - delete h[1].block - delete h[1].needExtended - 
delete h[1].cksum - t.has(th, h[1], "fields "+name) - }) - t.end() -}) - -// test these manually. they're a bit rare to find in the wild -tap.test("parseNumeric tests", function (t) { - var parseNumeric = TarHeader.parseNumeric - , numbers = - { "303737373737373700": 2097151 - , "30373737373737373737373700": 8589934591 - , "303030303036343400": 420 - , "800000ffffffffffff": 281474976710655 - , "ffffff000000000001": -281474976710654 - , "ffffff000000000000": -281474976710655 - , "800000000000200000": 2097152 - , "8000000000001544c5": 1393861 - , "ffffffffffff1544c5": -15383354 } - Object.keys(numbers).forEach(function (n) { - var b = new Buffer(n, "hex") - t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n]) - }) - t.end() -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js b/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js deleted file mode 100644 index d4b03a1fe936b..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js +++ /dev/null @@ -1,886 +0,0 @@ -// This is exactly like test/pack.js, except that it's excluding -// any proprietary headers. -// -// This loses some information about the filesystem, but creates -// tarballs that are supported by more versions of tar, especially -// old non-spec-compliant copies of gnutar. - -// the symlink file is excluded from git, because it makes -// windows freak the hell out. -var fs = require("fs") - , path = require("path") - , symlink = path.resolve(__dirname, "fixtures/symlink") -try { fs.unlinkSync(symlink) } catch (e) {} -fs.symlinkSync("./hardlink-1", symlink) -process.on("exit", function () { - fs.unlinkSync(symlink) -}) - -var tap = require("tap") - , tar = require("../tar.js") - , pkg = require("../package.json") - , Pack = tar.Pack - , fstream = require("fstream") - , Reader = fstream.Reader - , Writer = fstream.Writer - , input = path.resolve(__dirname, "fixtures/") - , target = path.resolve(__dirname, "tmp/pack.tar") - , uid = process.getuid ? process.getuid() : 0 - , gid = process.getgid ? process.getgid() : 0 - - , entries = - - // the global header and root fixtures/ dir are going to get - // a different date each time, so omit that bit. - // Also, dev/ino values differ across machines, so that's not - // included. 
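The parseNumeric table in the header.js hunk above is the densest part of these tests: a value whose first byte has the high bit set is a base-256 "binary" number rather than octal ASCII, with 0x80 marking a positive big-endian value. Note that the negative rows only come out right if 0xff-prefixed values are decoded as ones' complement, which is what this historical implementation did (modern tars read two's complement). A sketch that reproduces the table:

function parseNumeric (buf) {
  if (buf[0] & 0x80) {               // base-256 extension
    var positive = buf[0] === 0x80
    var val = 0
    for (var i = 0; i < buf.length; i++) {
      var b = positive
        ? (i === 0 ? 0 : buf[i])     // drop the 0x80 marker byte
        : 255 - buf[i]               // ones' complement
      val = val * 256 + b
    }
    return positive ? val : -val
  }
  // plain field: NUL/space-terminated octal ASCII
  var s = buf.toString('ascii').split('\0')[0].trim()
  return parseInt(s, 8) || 0
}

console.log(parseNumeric(Buffer.from('800000ffffffffffff', 'hex'))) // 281474976710655
console.log(parseNumeric(Buffer.from('ffffff000000000001', 'hex'))) // -281474976710654
console.log(parseNumeric(Buffer.from('303030303036343400', 'hex'))) // 420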
- [ [ 'entry', - { path: 'fixtures/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - uid: uid, - gid: gid, - size: 200 } ] - - , [ 'entry', - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/a.txt', - mode: 420, - uid: uid, - gid: gid, - size: 257, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/b.txt', - mode: 420, - uid: uid, - gid: gid, - size: 512, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/c.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/cc.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/sub/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/foo.js', - mode: 420, - uid: uid, - gid: gid, - size: 4, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-1', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-2', - mode: 420, - uid: uid, - gid: gid, - size: 0, - type: '1', - linkpath: 'fixtures/hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 
0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/star.4.html', - mode: 420, - uid: uid, - gid: gid, - size: 54081, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/packtest/Ω.txt', - uid: uid, - gid: gid, - size: 2 } ] - - , [ 'entry', - { path: 'fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, 
- type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 
'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 100, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/symlink', - uid: uid, - gid: gid, - size: 0, - type: '2', - linkpath: 'hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: "fixtures/Ω.txt" - , uid: uid - , gid: gid - , size: 2 } ] - - , [ 'entry', - { path: 'fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - ] - - -// first, make sure that the hardlinks are actually hardlinks, or this -// won't work. Git has a way of replacing them with a copy. -var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") - , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") - , fs = require("fs") - -try { fs.unlinkSync(hard2) } catch (e) {} -fs.linkSync(hard1, hard2) - -tap.test("with global header", { timeout: 10000 }, function (t) { - runTest(t, true) -}) - -tap.test("without global header", { timeout: 10000 }, function (t) { - runTest(t, false) -}) - -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} - - -function runTest (t, doGH) { - var reader = Reader({ path: input - , filter: function () { - return !this.path.match(/\.(tar|hex)$/) - } - , sort: alphasort - }) - - var props = doGH ? pkg : {} - props.noProprietary = true - var pack = Pack(props) - var writer = Writer(target) - - // global header should be skipped regardless, since it has no content. 
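The pipeline this test drives, in isolation: walk a tree with fstream, pack it with the proprietary NODETAR.* extended headers suppressed so that older tar implementations can read the result, and write the tarball out. The paths here are placeholders; fstream and the tar 1.x Pack API are the ones used throughout these tests:

var tar = require('tar')         // node-tar 1.x
var fstream = require('fstream')

fstream.Reader({ path: 'fixtures/', type: 'Directory' })
  .pipe(tar.Pack({ noProprietary: true }))
  .pipe(fstream.Writer('portable.tar'))
  .on('close', function () {
    console.log('packed without NODETAR.* extended headers')
  })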
- var entry = 0 - - t.ok(reader, "reader ok") - t.ok(pack, "pack ok") - t.ok(writer, "writer ok") - - pack.pipe(writer) - - var parse = tar.Parse() - t.ok(parse, "parser should be ok") - - pack.on("data", function (c) { - // console.error("PACK DATA") - if (c.length !== 512) { - // this one is too noisy, only assert if it'll be relevant - t.equal(c.length, 512, "parser should emit data in 512byte blocks") - } - parse.write(c) - }) - - pack.on("end", function () { - // console.error("PACK END") - t.pass("parser ends") - parse.end() - }) - - pack.on("error", function (er) { - t.fail("pack error", er) - }) - - parse.on("error", function (er) { - t.fail("parse error", er) - }) - - writer.on("error", function (er) { - t.fail("writer error", er) - }) - - reader.on("error", function (er) { - t.fail("reader error", er) - }) - - parse.on("*", function (ev, e) { - var wanted = entries[entry++] - if (!wanted) { - t.fail("unexpected event: "+ev) - return - } - t.equal(ev, wanted[0], "event type should be "+wanted[0]) - - if (ev !== wanted[0] || e.path !== wanted[1].path) { - console.error("wanted", wanted) - console.error([ev, e.props]) - e.on("end", function () { - console.error(e.fields) - throw "break" - }) - } - - t.has(e.props, wanted[1], "properties "+wanted[1].path) - if (wanted[2]) { - e.on("end", function () { - if (!e.fields) { - t.ok(e.fields, "should get fields") - } else { - t.has(e.fields, wanted[2], "should get expected fields") - } - }) - } - }) - - reader.pipe(pack) - - writer.on("close", function () { - t.equal(entry, entries.length, "should get all expected entries") - t.pass("it finished") - t.end() - }) - -} diff --git a/node_modules/node-gyp/node_modules/tar/test/pack.js b/node_modules/node-gyp/node_modules/tar/test/pack.js deleted file mode 100644 index 0f16c07bb0162..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/pack.js +++ /dev/null @@ -1,952 +0,0 @@ - -// the symlink file is excluded from git, because it makes -// windows freak the hell out. -var fs = require("fs") - , path = require("path") - , symlink = path.resolve(__dirname, "fixtures/symlink") -try { fs.unlinkSync(symlink) } catch (e) {} -fs.symlinkSync("./hardlink-1", symlink) -process.on("exit", function () { - fs.unlinkSync(symlink) -}) - - -var tap = require("tap") - , tar = require("../tar.js") - , pkg = require("../package.json") - , Pack = tar.Pack - , fstream = require("fstream") - , Reader = fstream.Reader - , Writer = fstream.Writer - , input = path.resolve(__dirname, "fixtures/") - , target = path.resolve(__dirname, "tmp/pack.tar") - , uid = process.getuid ? process.getuid() : 0 - , gid = process.getgid ? process.getgid() : 0 - - , entries = - - // the global header and root fixtures/ dir are going to get - // a different date each time, so omit that bit. - // Also, dev/ino values differ across machines, so that's not - // included. 
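And the verification half used by both pack tests: tee the packed bytes into tar.Parse and check every record against an expectation list. The '*' event reports each record with its event name first, which is how the tests tell plain entries apart from extendedHeader and globalExtendedHeader records. A sketch with a hypothetical two-record stand-in for the long tables above:

var fs = require('fs')
var tar = require('tar') // node-tar 1.x

var expected = [
  ['globalExtendedHeader', 'PaxHeader/'],
  ['entry', 'fixtures/']
]
var n = 0

var parse = tar.Parse()
parse.on('*', function (ev, entry) {
  var want = expected[n++]
  if (!want || ev !== want[0] || entry.path !== want[1]) {
    console.error('record', n, 'mismatch:', ev, entry.path)
  }
})
parse.on('end', function () {
  console.log(n === expected.length ? 'all records seen' : 'short read')
})

fs.createReadStream('portable.tar').pipe(parse)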
- [ [ 'globalExtendedHeader', - { path: 'PaxHeader/', - mode: 438, - uid: 0, - gid: 0, - type: 'g', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { "NODETAR.author": pkg.author, - "NODETAR.name": pkg.name, - "NODETAR.description": pkg.description, - "NODETAR.version": pkg.version, - "NODETAR.repository.type": pkg.repository.type, - "NODETAR.repository.url": pkg.repository.url, - "NODETAR.main": pkg.main, - "NODETAR.scripts.test": pkg.scripts.test } ] - - , [ 'entry', - { path: 'fixtures/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - 'NODETAR.depth': '1', - 'NODETAR.type': 'File', - nlink: 1, - uid: uid, - gid: gid, - size: 200, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '', - 'NODETAR.depth': '1', - 'NODETAR.type': 'File', - nlink: 1, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/a.txt', - mode: 420, - uid: uid, - gid: gid, - size: 257, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/b.txt', - mode: 420, - uid: uid, - gid: gid, - size: 512, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/c.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/cc.txt', - mode: 420, - uid: uid, - gid: gid, - size: 513, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/dir/sub/', - mode: 488, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - - , [ 'entry', - { path: 'fixtures/foo.js', - mode: 420, - uid: uid, - gid: gid, - size: 4, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - 
ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-1', - mode: 420, - uid: uid, - gid: gid, - size: 200, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/hardlink-2', - mode: 420, - uid: uid, - gid: gid, - size: 0, - type: '1', - linkpath: 'fixtures/hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/omega.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/packtest/star.4.html', - mode: 420, - uid: uid, - gid: gid, - size: 54081, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'fixtures/packtest/Ω.txt', - 'NODETAR.depth': '2', - 'NODETAR.type': 'File', - nlink: 1, - uid: uid, - gid: gid, - size: 2, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/packtest/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '', - 'NODETAR.depth': '2', - 'NODETAR.type': 'File', - nlink: 1, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - - , [ 'entry', - { path: 'fixtures/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/', - mode: 
493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', - mode: 493, - uid: 
uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', - mode: 493, - uid: uid, - gid: gid, - size: 0, - type: '5', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: uid, - gid: gid, - size: 100, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'entry', - { path: 'fixtures/symlink', - uid: uid, - gid: gid, - size: 0, - type: '2', - linkpath: 'hardlink-1', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' } ] - - , [ 'extendedHeader', - { path: 'PaxHeader/fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - type: 'x', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: "fixtures/Ω.txt" - , "NODETAR.depth": "1" - , "NODETAR.type": "File" - , nlink: 1 - , uid: uid - , gid: gid - , size: 2 - , "NODETAR.blksize": "4096" - , "NODETAR.blocks": "8" } ] - - , [ 'entry', - { path: 'fixtures/Ω.txt', - mode: 420, - uid: uid, - gid: gid, - size: 2, - type: '0', - linkpath: '', - ustar: 'ustar\u0000', - ustarver: '00', - uname: '', - gname: '', - devmaj: 0, - devmin: 0, - fill: '', - 'NODETAR.depth': '1', - 'NODETAR.type': 'File', - nlink: 1, - 'NODETAR.blksize': '4096', - 'NODETAR.blocks': '8' } ] - ] - - -// first, make sure that the hardlinks are actually hardlinks, or this -// won't work. Git has a way of replacing them with a copy. -var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") - , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") - , fs = require("fs") - -try { fs.unlinkSync(hard2) } catch (e) {} -fs.linkSync(hard1, hard2) - -tap.test("with global header", { timeout: 10000 }, function (t) { - runTest(t, true) -}) - -tap.test("without global header", { timeout: 10000 }, function (t) { - runTest(t, false) -}) - -tap.test("with from base", { timeout: 10000 }, function (t) { - runTest(t, true, true) -}) - -function alphasort (a, b) { - return a === b ? 0 - : a.toLowerCase() > b.toLowerCase() ? 1 - : a.toLowerCase() < b.toLowerCase() ? -1 - : a > b ? 1 - : -1 -} - - -function runTest (t, doGH, doFromBase) { - var reader = Reader({ path: input - , filter: function () { - return !this.path.match(/\.(tar|hex)$/) - } - , sort: alphasort - }) - - var props = doGH ? pkg : {} - if(doFromBase) props.fromBase = true; - - var pack = Pack(props) - var writer = Writer(target) - - // skip the global header if we're not doing that. - var entry = doGH ? 
0 : 1 - - t.ok(reader, "reader ok") - t.ok(pack, "pack ok") - t.ok(writer, "writer ok") - - pack.pipe(writer) - - var parse = tar.Parse() - t.ok(parse, "parser should be ok") - - pack.on("data", function (c) { - // console.error("PACK DATA") - if (c.length !== 512) { - // this one is too noisy, only assert if it'll be relevant - t.equal(c.length, 512, "parser should emit data in 512byte blocks") - } - parse.write(c) - }) - - pack.on("end", function () { - // console.error("PACK END") - t.pass("parser ends") - parse.end() - }) - - pack.on("error", function (er) { - t.fail("pack error", er) - }) - - parse.on("error", function (er) { - t.fail("parse error", er) - }) - - writer.on("error", function (er) { - t.fail("writer error", er) - }) - - reader.on("error", function (er) { - t.fail("reader error", er) - }) - - parse.on("*", function (ev, e) { - var wanted = entries[entry++] - if (!wanted) { - t.fail("unexpected event: "+ev) - return - } - t.equal(ev, wanted[0], "event type should be "+wanted[0]) - - if(doFromBase) { - if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100) - wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc' - - if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/') - if(wanted[1].path == '') wanted[1].path = '/' - if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '') - - wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '') - } - - if (ev !== wanted[0] || e.path !== wanted[1].path) { - console.error("wanted", wanted) - console.error([ev, e.props]) - e.on("end", function () { - console.error(e.fields) - throw "break" - }) - } - - - t.has(e.props, wanted[1], "properties "+wanted[1].path) - if (wanted[2]) { - e.on("end", function () { - if (!e.fields) { - t.ok(e.fields, "should get fields") - } else { - t.has(e.fields, wanted[2], "should get expected fields") - } - }) - } - }) - - reader.pipe(pack) - - writer.on("close", function () { - t.equal(entry, entries.length, "should get all expected entries") - t.pass("it finished") - t.end() - }) - -} diff --git a/node_modules/node-gyp/node_modules/tar/test/parse-discard.js b/node_modules/node-gyp/node_modules/tar/test/parse-discard.js deleted file mode 100644 index da01a65ccc7d7..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/parse-discard.js +++ /dev/null @@ -1,29 +0,0 @@ -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/c.tar") - -tap.test("parser test", function (t) { - var parser = tar.Parse() - var total = 0 - var dataTotal = 0 - - parser.on("end", function () { - - t.equals(total-513,dataTotal,'should have discarded only c.txt') - - t.end() - }) - - fs.createReadStream(file) - .pipe(parser) - .on('entry',function(entry){ - if(entry.path === 'c.txt') entry.abort() - - total += entry.size; - entry.on('data',function(data){ - dataTotal += data.length - }) - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/parse.js b/node_modules/node-gyp/node_modules/tar/test/parse.js deleted file mode 100644 index f765a50129bff..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/parse.js +++ /dev/null @@ -1,359 +0,0 @@ -var tap = require("tap") - , tar = require("../tar.js") - , fs = require("fs") - , path = require("path") - , file = path.resolve(__dirname, "fixtures/c.tar") - , index = 0 - - , expect = -[ [ 'entry', - { path: 'c.txt', - mode: 420, - uid: 24561, - 
gid: 20, - size: 513, - mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'), - cksum: 5422, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'entry', - { path: 'cc.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 513, - mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'), - cksum: 5525, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'entry', - { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 100, - mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'), - cksum: 18124, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'entry', - { path: 'Ω.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 2, - mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - cksum: 5695, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/Ω.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 120, - mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - cksum: 6702, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: 'Ω.txt', - ctime: 1319737909, - atime: 1319739061, - dev: 234881026, - ino: 51693379, - nlink: 1 } ], - [ 'entry', - { path: 'Ω.txt', - mode: 420, - uid: 24561, - gid: 20, - size: 2, - mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - cksum: 5695, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), - atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'), - dev: 234881026, - ino: 51693379, - nlink: 1 }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 353, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 14488, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - ctime: 1319686868, - atime: 1319741254, - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 1 } ], - [ 'entry', - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 200, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 14570, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'), - 
'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 1 }, - undefined ], - [ 'longPath', - { path: '././@LongLink', - mode: 0, - uid: 0, - gid: 0, - size: 201, - mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), - cksum: 4976, - type: 'L', - linkpath: '', - ustar: false }, - '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], - [ 'entry', - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 1000, - gid: 1000, - size: 201, - mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'), - cksum: 14086, - type: '0', - linkpath: '', - ustar: false }, - undefined ], - [ 'longLinkpath', - { path: '././@LongLink', - mode: 0, - uid: 0, - gid: 0, - size: 201, - mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), - cksum: 4975, - type: 'K', - linkpath: '', - ustar: false }, - '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], - [ 'longPath', - { path: '././@LongLink', - mode: 0, - uid: 0, - gid: 0, - size: 201, - mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), - cksum: 4976, - type: 'L', - linkpath: '', - ustar: false }, - '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ], - [ 'entry', - { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', - mode: 511, - uid: 1000, - gid: 1000, - size: 0, - mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'), - cksum: 21603, - type: '2', - linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - ustar: false }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/200-hard', - mode: 420, - uid: 24561, - gid: 20, - size: 143, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 6533, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { ctime: 1320617144, - atime: 1320617232, - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 } ], - [ 'entry', - { path: '200-hard', - mode: 420, - uid: 24561, - gid: 20, - size: 200, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 5526, - type: '0', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), - atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'), - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 }, - undefined ], - [ 'extendedHeader', - { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 353, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - 
cksum: 14488, - type: 'x', - linkpath: '', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '' }, - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - ctime: 1320617144, - atime: 1320617406, - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 } ], - [ 'entry', - { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', - mode: 420, - uid: 24561, - gid: 20, - size: 0, - mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), - cksum: 15173, - type: '1', - linkpath: '200-hard', - ustar: 'ustar\0', - ustarver: '00', - uname: 'isaacs', - gname: 'staff', - devmaj: 0, - devmin: 0, - fill: '', - ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), - atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'), - 'LIBARCHIVE.creationtime': '1319686852', - dev: 234881026, - ino: 51681874, - nlink: 2 }, - undefined ] ] - - -tap.test("parser test", function (t) { - var parser = tar.Parse() - - parser.on("end", function () { - t.equal(index, expect.length, "saw all expected events") - t.end() - }) - - fs.createReadStream(file) - .pipe(parser) - .on("*", function (ev, entry) { - var wanted = expect[index] - if (!wanted) { - return t.fail("Unexpected event: " + ev) - } - var result = [ev, entry.props] - entry.on("end", function () { - result.push(entry.fields || entry.body) - - t.equal(ev, wanted[0], index + " event type") - t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties") - if (wanted[2]) { - t.equivalent(result[2], wanted[2], "metadata values") - } - index ++ - }) - }) -}) diff --git a/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js b/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js deleted file mode 100644 index a00ff7faa0390..0000000000000 --- a/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js +++ /dev/null @@ -1,20 +0,0 @@ -// clean up the fixtures - -var tap = require("tap") -, rimraf = require("rimraf") -, test = tap.test -, path = require("path") - -test("clean fixtures", function (t) { - rimraf(path.resolve(__dirname, "fixtures"), function (er) { - t.ifError(er, "rimraf ./fixtures/") - t.end() - }) -}) - -test("clean tmp", function (t) { - rimraf(path.resolve(__dirname, "tmp"), function (er) { - t.ifError(er, "rimraf ./tmp/") - t.end() - }) -}) diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json index df3a5dfb5e74e..0da5060802f46 100644 --- a/node_modules/node-gyp/package.json +++ b/node_modules/node-gyp/package.json @@ -1,33 +1,29 @@ { - "_from": "node-gyp@3", - "_id": "node-gyp@3.8.0", + "_from": "node-gyp@4.0.0", + "_id": "node-gyp@4.0.0", "_inBundle": false, - "_integrity": "sha512-3g8lYefrRRzvGeSowdJKAKyks8oUpLEd/DyPV4eMhVlhJ0aNaZqIrNUIPuEWWTAoPqyFkfGrM67MC69baqn6vA==", + "_integrity": "sha512-2XiryJ8sICNo6ej8d0idXDEMKfVfFK7kekGCtJAuelGsYHQxhj13KTf95swTCN2dZ/4lTfZ84Fu31jqJEEgjWA==", "_location": "/node-gyp", "_phantomChildren": { - "abbrev": "1.1.1", - "block-stream": "0.0.9", - "fstream": "1.0.12", - "inherits": "2.0.3" + "abbrev": "1.1.1" }, "_requested": { - "type": "range", + "type": "version", "registry": true, - "raw": "node-gyp@3", + "raw": "node-gyp@4.0.0", "name": "node-gyp", "escapedName": "node-gyp", - 
"rawSpec": "3", + "rawSpec": "4.0.0", "saveSpec": null, - "fetchSpec": "3" + "fetchSpec": "4.0.0" }, "_requiredBy": [ "#USER", - "/", - "/npm-lifecycle" + "/" ], - "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-3.8.0.tgz", - "_shasum": "540304261c330e80d0d5edce253a68cb3964218c", - "_spec": "node-gyp@3", + "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-4.0.0.tgz", + "_shasum": "972654af4e5dd0cd2a19081b4b46fe0442ba6f45", + "_spec": "node-gyp@4.0.0", "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Nathan Rajlich", @@ -42,7 +38,6 @@ }, "bundleDependencies": false, "dependencies": { - "fstream": "^1.0.0", "glob": "^7.0.3", "graceful-fs": "^4.1.2", "mkdirp": "^0.5.0", @@ -52,7 +47,7 @@ "request": "^2.87.0", "rimraf": "2", "semver": "~5.3.0", - "tar": "^2.0.0", + "tar": "^4.4.8", "which": "1" }, "deprecated": false, @@ -64,7 +59,7 @@ "tape": "~4.2.0" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 4.0.0" }, "homepage": "https://github.com/nodejs/node-gyp#readme", "installVersion": 9, @@ -88,5 +83,5 @@ "scripts": { "test": "tape test/test-*" }, - "version": "3.8.0" + "version": "4.0.0" } diff --git a/node_modules/npm-lifecycle/CHANGELOG.md b/node_modules/npm-lifecycle/CHANGELOG.md index 7e7bc92e2922e..fb74ec450c30f 100644 --- a/node_modules/npm-lifecycle/CHANGELOG.md +++ b/node_modules/npm-lifecycle/CHANGELOG.md @@ -2,6 +2,16 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [2.1.1](https://github.com/npm/lifecycle/compare/v2.1.0...v2.1.1) (2019-05-08) + + +### Bug Fixes + +* **test:** update postinstall script for fixture ([220cd70](https://github.com/npm/lifecycle/commit/220cd70)) + + + # [2.1.0](https://github.com/npm/lifecycle/compare/v2.0.3...v2.1.0) (2018-08-13) diff --git a/node_modules/npm-lifecycle/index.js b/node_modules/npm-lifecycle/index.js index 4eb83255a94a2..4a664596047ed 100644 --- a/node_modules/npm-lifecycle/index.js +++ b/node_modules/npm-lifecycle/index.js @@ -167,7 +167,7 @@ function shouldPrependCurrentNodeDirToPATH (opts) { var isWindows = process.platform === 'win32' var foundExecPath try { - foundExecPath = which.sync(path.basename(process.execPath), {pathExt: isWindows ? ';' : ':'}) + foundExecPath = which.sync(path.basename(process.execPath), { pathExt: isWindows ? ';' : ':' }) // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable. 
isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !== fs.realpathSync(foundExecPath).toUpperCase() diff --git a/node_modules/npm-lifecycle/package.json b/node_modules/npm-lifecycle/package.json index 6f63b06fd51c4..840d44c2556db 100644 --- a/node_modules/npm-lifecycle/package.json +++ b/node_modules/npm-lifecycle/package.json @@ -1,29 +1,30 @@ { - "_from": "npm-lifecycle@2.1.0", - "_id": "npm-lifecycle@2.1.0", + "_from": "npm-lifecycle@^2.1.1", + "_id": "npm-lifecycle@2.1.1", "_inBundle": false, - "_integrity": "sha512-QbBfLlGBKsktwBZLj6AviHC6Q9Y3R/AY4a2PYSIRhSKSS0/CxRyD/PfxEX6tPeOCXQgMSNdwGeECacstgptc+g==", + "_integrity": "sha512-+Vg6I60Z75V/09pdcH5iUo/99Q/vop35PaI99elvxk56azSVVsdsSsS/sXqKDNwbRRNN1qSxkcO45ZOu0yOWew==", "_location": "/npm-lifecycle", "_phantomChildren": {}, "_requested": { - "type": "version", + "type": "range", "registry": true, - "raw": "npm-lifecycle@2.1.0", + "raw": "npm-lifecycle@^2.1.1", "name": "npm-lifecycle", "escapedName": "npm-lifecycle", - "rawSpec": "2.1.0", + "rawSpec": "^2.1.1", "saveSpec": null, - "fetchSpec": "2.1.0" + "fetchSpec": "^2.1.1" }, "_requiredBy": [ "#USER", "/", - "/libcipm" + "/libcipm", + "/libnpm" ], - "_resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-2.1.0.tgz", - "_shasum": "1eda2eedb82db929e3a0c50341ab0aad140ed569", - "_spec": "npm-lifecycle@2.1.0", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-2.1.1.tgz", + "_shasum": "0027c09646f0fd346c5c93377bdaba59c6748fdf", + "_spec": "npm-lifecycle@^2.1.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Mike Sherov" }, @@ -33,8 +34,8 @@ "bundleDependencies": false, "dependencies": { "byline": "^5.0.0", - "graceful-fs": "^4.1.11", - "node-gyp": "^3.8.0", + "graceful-fs": "^4.1.15", + "node-gyp": "^4.0.0", "resolve-from": "^4.0.0", "slide": "^1.1.6", "uid-number": "0.0.6", @@ -44,11 +45,11 @@ "deprecated": false, "description": "JavaScript package lifecycle hook runner", "devDependencies": { - "nyc": "^12.0.2", - "sinon": "^6.1.5", - "standard": "^11.0.1", + "nyc": "^14.1.0", + "sinon": "^7.2.3", + "standard": "^12.0.1", "standard-version": "^4.4.0", - "tap": "^12.0.1", + "tap": "^13.1.3", "weallbehave": "^1.2.0", "weallcontribute": "^1.0.8" }, @@ -76,9 +77,9 @@ "prerelease": "npm t", "pretest": "standard", "release": "standard-version -s", - "test": "tap -J --coverage test/*.js", + "test": "tap", "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "2.1.0" + "version": "2.1.1" } diff --git a/node_modules/npm-registry-fetch/CHANGELOG.md b/node_modules/npm-registry-fetch/CHANGELOG.md index 56d849a773037..d24e026914c23 100644 --- a/node_modules/npm-registry-fetch/CHANGELOG.md +++ b/node_modules/npm-registry-fetch/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [3.9.1](https://github.com/npm/registry-fetch/compare/v3.9.0...v3.9.1) (2019-07-02) + + + # [3.9.0](https://github.com/npm/registry-fetch/compare/v3.8.0...v3.9.0) (2019-01-24) diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/CHANGELOG.md b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/CHANGELOG.md new file mode 100644 index 0000000000000..a446842f55d80 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/CHANGELOG.md @@ -0,0 +1,555 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [4.0.2](https://github.com/zkat/make-fetch-happen/compare/v4.0.1...v4.0.2) (2019-07-02) + + + + +## [4.0.1](https://github.com/zkat/make-fetch-happen/compare/v4.0.0...v4.0.1) (2018-04-12) + + +### Bug Fixes + +* **integrity:** use new sri.match() for verification ([4f371a0](https://github.com/zkat/make-fetch-happen/commit/4f371a0)) + + + + +# [4.0.0](https://github.com/zkat/make-fetch-happen/compare/v3.0.0...v4.0.0) (2018-04-09) + + +### meta + +* drop node@4, add node@9 ([7b0191a](https://github.com/zkat/make-fetch-happen/commit/7b0191a)) + + +### BREAKING CHANGES + +* node@4 is no longer supported + + + + +# [3.0.0](https://github.com/zkat/make-fetch-happen/compare/v2.6.0...v3.0.0) (2018-03-12) + + +### Bug Fixes + +* **license:** switch to ISC ([#49](https://github.com/zkat/make-fetch-happen/issues/49)) ([bf90c6d](https://github.com/zkat/make-fetch-happen/commit/bf90c6d)) +* **standard:** standard@11 update ([ff0aa70](https://github.com/zkat/make-fetch-happen/commit/ff0aa70)) + + +### BREAKING CHANGES + +* **license:** license changed from CC0 to ISC. 
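The 4.0.1 entry above moved integrity verification onto `ssri`'s matching API. A minimal sketch of that style of check, using `ssri` directly (sample data and algorithm choice are illustrative, not the library's internal code):

```javascript
const ssri = require('ssri')

const data = Buffer.from('hello world')
const integrity = ssri.fromData(data, { algorithms: ['sha512'] })

// checkData returns the matching Hash on success, or false on a mismatch
console.log(Boolean(ssri.checkData(data, integrity)))                    // true
console.log(Boolean(ssri.checkData(Buffer.from('tampered'), integrity))) // false
```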
+ + + + +# [2.6.0](https://github.com/zkat/make-fetch-happen/compare/v2.5.0...v2.6.0) (2017-11-14) + + +### Bug Fixes + +* **integrity:** disable node-fetch compress when checking integrity (#42) ([a7cc74c](https://github.com/zkat/make-fetch-happen/commit/a7cc74c)) + + +### Features + +* **onretry:** Add `options.onRetry` (#48) ([f90ccff](https://github.com/zkat/make-fetch-happen/commit/f90ccff)) + + + + +# [2.5.0](https://github.com/zkat/make-fetch-happen/compare/v2.4.13...v2.5.0) (2017-08-24) + + +### Bug Fixes + +* **agent:** support timeout durations greater than 30 seconds ([04875ae](https://github.com/zkat/make-fetch-happen/commit/04875ae)), closes [#35](https://github.com/zkat/make-fetch-happen/issues/35) + + +### Features + +* **cache:** export cache deletion functionality (#40) ([3da4250](https://github.com/zkat/make-fetch-happen/commit/3da4250)) + + + + +## [2.4.13](https://github.com/zkat/make-fetch-happen/compare/v2.4.12...v2.4.13) (2017-06-29) + + +### Bug Fixes + +* **deps:** bump other deps for bugfixes ([eab8297](https://github.com/zkat/make-fetch-happen/commit/eab8297)) +* **proxy:** bump proxy deps with bugfixes (#32) ([632f860](https://github.com/zkat/make-fetch-happen/commit/632f860)), closes [#32](https://github.com/zkat/make-fetch-happen/issues/32) + + + + +## [2.4.12](https://github.com/zkat/make-fetch-happen/compare/v2.4.11...v2.4.12) (2017-06-06) + + +### Bug Fixes + +* **cache:** encode x-local-cache-etc headers to be header-safe ([dc9fb1b](https://github.com/zkat/make-fetch-happen/commit/dc9fb1b)) + + + + +## [2.4.11](https://github.com/zkat/make-fetch-happen/compare/v2.4.10...v2.4.11) (2017-06-05) + + +### Bug Fixes + +* **deps:** bump deps with ssri fix ([bef1994](https://github.com/zkat/make-fetch-happen/commit/bef1994)) + + + + +## [2.4.10](https://github.com/zkat/make-fetch-happen/compare/v2.4.9...v2.4.10) (2017-05-31) + + +### Bug Fixes + +* **deps:** bump dep versions with bugfixes ([0af4003](https://github.com/zkat/make-fetch-happen/commit/0af4003)) +* **proxy:** use auth parameter for proxy authentication (#30) ([c687306](https://github.com/zkat/make-fetch-happen/commit/c687306)) + + + + +## [2.4.9](https://github.com/zkat/make-fetch-happen/compare/v2.4.8...v2.4.9) (2017-05-25) + + +### Bug Fixes + +* **cache:** use the passed-in promise for resolving cache stuff ([4c46257](https://github.com/zkat/make-fetch-happen/commit/4c46257)) + + + + +## [2.4.8](https://github.com/zkat/make-fetch-happen/compare/v2.4.7...v2.4.8) (2017-05-25) + + +### Bug Fixes + +* **cache:** pass uid/gid/Promise through to cache ([a847c92](https://github.com/zkat/make-fetch-happen/commit/a847c92)) + + + + +## [2.4.7](https://github.com/zkat/make-fetch-happen/compare/v2.4.6...v2.4.7) (2017-05-24) + + +### Bug Fixes + +* **deps:** pull in various fixes from deps ([fc2a587](https://github.com/zkat/make-fetch-happen/commit/fc2a587)) + + + + +## [2.4.6](https://github.com/zkat/make-fetch-happen/compare/v2.4.5...v2.4.6) (2017-05-24) + + +### Bug Fixes + +* **proxy:** choose agent for http(s)-proxy by protocol of destUrl ([ea4832a](https://github.com/zkat/make-fetch-happen/commit/ea4832a)) +* **proxy:** make socks proxy working ([1de810a](https://github.com/zkat/make-fetch-happen/commit/1de810a)) +* **proxy:** revert previous proxy solution ([563b0d8](https://github.com/zkat/make-fetch-happen/commit/563b0d8)) + + + + +## [2.4.5](https://github.com/zkat/make-fetch-happen/compare/v2.4.4...v2.4.5) (2017-05-24) + + +### Bug Fixes + +* **proxy:** use the destination url when determining agent 
([1a714e7](https://github.com/zkat/make-fetch-happen/commit/1a714e7)) + + + + +## [2.4.4](https://github.com/zkat/make-fetch-happen/compare/v2.4.3...v2.4.4) (2017-05-23) + + +### Bug Fixes + +* **redirect:** handle redirects explicitly (#27) ([4c4af54](https://github.com/zkat/make-fetch-happen/commit/4c4af54)) + + + + +## [2.4.3](https://github.com/zkat/make-fetch-happen/compare/v2.4.2...v2.4.3) (2017-05-06) + + +### Bug Fixes + +* **redirect:** redirects now delete authorization if hosts fail to match ([c071805](https://github.com/zkat/make-fetch-happen/commit/c071805)) + + + + +## [2.4.2](https://github.com/zkat/make-fetch-happen/compare/v2.4.1...v2.4.2) (2017-05-04) + + +### Bug Fixes + +* **cache:** reduce race condition window by checking for content ([24544b1](https://github.com/zkat/make-fetch-happen/commit/24544b1)) +* **match:** Rewrite the conditional stream logic (#25) ([66bba4b](https://github.com/zkat/make-fetch-happen/commit/66bba4b)) + + + + +## [2.4.1](https://github.com/zkat/make-fetch-happen/compare/v2.4.0...v2.4.1) (2017-04-28) + + +### Bug Fixes + +* **memoization:** missed spots + allow passthrough of memo objs ([ac0cd12](https://github.com/zkat/make-fetch-happen/commit/ac0cd12)) + + + + +# [2.4.0](https://github.com/zkat/make-fetch-happen/compare/v2.3.0...v2.4.0) (2017-04-28) + + +### Bug Fixes + +* **memoize:** cacache had a broken memoizer ([8a9ed4c](https://github.com/zkat/make-fetch-happen/commit/8a9ed4c)) + + +### Features + +* **memoization:** only slurp stuff into memory if opts.memoize is not false ([0744adc](https://github.com/zkat/make-fetch-happen/commit/0744adc)) + + + + +# [2.3.0](https://github.com/zkat/make-fetch-happen/compare/v2.2.6...v2.3.0) (2017-04-27) + + +### Features + +* **agent:** added opts.strictSSL and opts.localAddress ([c35015a](https://github.com/zkat/make-fetch-happen/commit/c35015a)) +* **proxy:** Added opts.noProxy and NO_PROXY support ([f45c915](https://github.com/zkat/make-fetch-happen/commit/f45c915)) + + + + +## [2.2.6](https://github.com/zkat/make-fetch-happen/compare/v2.2.5...v2.2.6) (2017-04-26) + + +### Bug Fixes + +* **agent:** check uppercase & lowercase proxy env (#24) ([acf2326](https://github.com/zkat/make-fetch-happen/commit/acf2326)), closes [#22](https://github.com/zkat/make-fetch-happen/issues/22) +* **deps:** switch to node-fetch-npm and stop bundling ([3db603b](https://github.com/zkat/make-fetch-happen/commit/3db603b)) + + + + +## [2.2.5](https://github.com/zkat/make-fetch-happen/compare/v2.2.4...v2.2.5) (2017-04-23) + + +### Bug Fixes + +* **deps:** bump cacache and use its size feature ([926c1d3](https://github.com/zkat/make-fetch-happen/commit/926c1d3)) + + + + +## [2.2.4](https://github.com/zkat/make-fetch-happen/compare/v2.2.3...v2.2.4) (2017-04-18) + + +### Bug Fixes + +* **integrity:** hash verification issues fixed ([07f9402](https://github.com/zkat/make-fetch-happen/commit/07f9402)) + + + + +## [2.2.3](https://github.com/zkat/make-fetch-happen/compare/v2.2.2...v2.2.3) (2017-04-18) + + +### Bug Fixes + +* **staleness:** responses older than 8h were never stale :< ([b54dd75](https://github.com/zkat/make-fetch-happen/commit/b54dd75)) +* **warning:** remove spurious warning, make format more spec-compliant ([2e4f6bb](https://github.com/zkat/make-fetch-happen/commit/2e4f6bb)) + + + + +## [2.2.2](https://github.com/zkat/make-fetch-happen/compare/v2.2.1...v2.2.2) (2017-04-12) + + +### Bug Fixes + +* **retry:** stop retrying 404s ([6fafd53](https://github.com/zkat/make-fetch-happen/commit/6fafd53)) + + + + +## 
[2.2.1](https://github.com/zkat/make-fetch-happen/compare/v2.2.0...v2.2.1) (2017-04-10) + + +### Bug Fixes + +* **deps:** move test-only deps to devDeps ([2daaf80](https://github.com/zkat/make-fetch-happen/commit/2daaf80)) + + + + +# [2.2.0](https://github.com/zkat/make-fetch-happen/compare/v2.1.0...v2.2.0) (2017-04-09) + + +### Bug Fixes + +* **cache:** treat caches as private ([57b7dc2](https://github.com/zkat/make-fetch-happen/commit/57b7dc2)) + + +### Features + +* **retry:** accept shorthand retry settings ([dfed69d](https://github.com/zkat/make-fetch-happen/commit/dfed69d)) + + + + +# [2.1.0](https://github.com/zkat/make-fetch-happen/compare/v2.0.4...v2.1.0) (2017-04-09) + + +### Features + +* **cache:** cache now obeys Age and a variety of other things (#13) ([7b9652d](https://github.com/zkat/make-fetch-happen/commit/7b9652d)) + + + + +## [2.0.4](https://github.com/zkat/make-fetch-happen/compare/v2.0.3...v2.0.4) (2017-04-09) + + +### Bug Fixes + +* **agent:** accept Request as fetch input, not just strings ([b71669a](https://github.com/zkat/make-fetch-happen/commit/b71669a)) + + + + +## [2.0.3](https://github.com/zkat/make-fetch-happen/compare/v2.0.2...v2.0.3) (2017-04-09) + + +### Bug Fixes + +* **deps:** seriously ([c29e7e7](https://github.com/zkat/make-fetch-happen/commit/c29e7e7)) + + + + +## [2.0.2](https://github.com/zkat/make-fetch-happen/compare/v2.0.1...v2.0.2) (2017-04-09) + + +### Bug Fixes + +* **deps:** use bundleDeps instead ([c36ebf0](https://github.com/zkat/make-fetch-happen/commit/c36ebf0)) + + + + +## [2.0.1](https://github.com/zkat/make-fetch-happen/compare/v2.0.0...v2.0.1) (2017-04-09) + + +### Bug Fixes + +* **deps:** make sure node-fetch tarball included in release ([3bf49d1](https://github.com/zkat/make-fetch-happen/commit/3bf49d1)) + + + + +# [2.0.0](https://github.com/zkat/make-fetch-happen/compare/v1.7.0...v2.0.0) (2017-04-09) + + +### Bug Fixes + +* **deps:** manually pull in newer node-fetch to avoid babel prod dep ([66e5e87](https://github.com/zkat/make-fetch-happen/commit/66e5e87)) +* **retry:** be more specific about when we retry ([a47b782](https://github.com/zkat/make-fetch-happen/commit/a47b782)) + + +### Features + +* **agent:** add ca/cert/key support to auto-agent (#15) ([57585a7](https://github.com/zkat/make-fetch-happen/commit/57585a7)) + + +### BREAKING CHANGES + +* **agent:** pac proxies are no longer supported. +* **retry:** Retry logic has changes. + +* 404s, 420s, and 429s all retry now. +* ENOTFOUND no longer retries. +* Only ECONNRESET, ECONNREFUSED, EADDRINUSE, ETIMEDOUT, and `request-timeout` errors are retried. 
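Taken together, the v2.0.0 retry rules above boil down to a small predicate. A sketch under those rules (illustrative only, not the shipped implementation; note that v2.2.2 above later removed 404 from the retried statuses):

```javascript
// Retry policy as described in the v2.0.0 breaking-changes notes.
const RETRYABLE_STATUSES = new Set([404, 420, 429])
const RETRYABLE_ERRORS = new Set([
  'ECONNRESET', 'ECONNREFUSED', 'EADDRINUSE', 'ETIMEDOUT', 'request-timeout'
])

function shouldRetry (method, res, err) {
  if (method === 'POST') return false // non-idempotent POSTs are never retried
  if (err) {
    // socket-level failure: retry only the listed codes (ENOTFOUND is not one)
    return RETRYABLE_ERRORS.has(err.code || err.type)
  }
  return RETRYABLE_STATUSES.has(res.status)
}
```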
+ + + + +# [1.7.0](https://github.com/zkat/make-fetch-happen/compare/v1.6.0...v1.7.0) (2017-04-08) + + +### Features + +* **cache:** add useful headers to inform users about cached data ([9bd7b00](https://github.com/zkat/make-fetch-happen/commit/9bd7b00)) + + + + +# [1.6.0](https://github.com/zkat/make-fetch-happen/compare/v1.5.1...v1.6.0) (2017-04-06) + + +### Features + +* **agent:** better, keepalive-supporting, default http agents ([16277f6](https://github.com/zkat/make-fetch-happen/commit/16277f6)) + + + + +## [1.5.1](https://github.com/zkat/make-fetch-happen/compare/v1.5.0...v1.5.1) (2017-04-05) + + +### Bug Fixes + +* **cache:** bump cacache for its fixed error messages ([2f2b916](https://github.com/zkat/make-fetch-happen/commit/2f2b916)) +* **cache:** fix handling of errors in cache reads ([5729222](https://github.com/zkat/make-fetch-happen/commit/5729222)) + + + + +# [1.5.0](https://github.com/zkat/make-fetch-happen/compare/v1.4.0...v1.5.0) (2017-04-04) + + +### Features + +* **retry:** retry requests on 408 timeouts, too ([8d8b5bd](https://github.com/zkat/make-fetch-happen/commit/8d8b5bd)) + + + + +# [1.4.0](https://github.com/zkat/make-fetch-happen/compare/v1.3.1...v1.4.0) (2017-04-04) + + +### Bug Fixes + +* **cache:** stop relying on BB.catch ([2b04494](https://github.com/zkat/make-fetch-happen/commit/2b04494)) + + +### Features + +* **retry:** report retry attempt number as extra header ([fd50927](https://github.com/zkat/make-fetch-happen/commit/fd50927)) + + + + +## [1.3.1](https://github.com/zkat/make-fetch-happen/compare/v1.3.0...v1.3.1) (2017-04-04) + + +### Bug Fixes + +* **cache:** pretend cache entry is missing on ENOENT ([9c2bb26](https://github.com/zkat/make-fetch-happen/commit/9c2bb26)) + + + + +# [1.3.0](https://github.com/zkat/make-fetch-happen/compare/v1.2.1...v1.3.0) (2017-04-04) + + +### Bug Fixes + +* **cache:** if metadata is missing for some odd reason, ignore the entry ([a021a6b](https://github.com/zkat/make-fetch-happen/commit/a021a6b)) + + +### Features + +* **cache:** add special headers when request was loaded straight from cache ([8a7dbd1](https://github.com/zkat/make-fetch-happen/commit/8a7dbd1)) +* **cache:** allow configuring algorithms to be calculated on insertion ([bf4a0f2](https://github.com/zkat/make-fetch-happen/commit/bf4a0f2)) + + + + +## [1.2.1](https://github.com/zkat/make-fetch-happen/compare/v1.2.0...v1.2.1) (2017-04-03) + + +### Bug Fixes + +* **integrity:** update cacache and ssri and change EBADCHECKSUM -> EINTEGRITY ([b6cf6f6](https://github.com/zkat/make-fetch-happen/commit/b6cf6f6)) + + + + +# [1.2.0](https://github.com/zkat/make-fetch-happen/compare/v1.1.0...v1.2.0) (2017-04-03) + + +### Features + +* **integrity:** full Subresource Integrity support (#10) ([a590159](https://github.com/zkat/make-fetch-happen/commit/a590159)) + + + + +# [1.1.0](https://github.com/zkat/make-fetch-happen/compare/v1.0.1...v1.1.0) (2017-04-01) + + +### Features + +* **opts:** fetch.defaults() for default options ([522a65e](https://github.com/zkat/make-fetch-happen/commit/522a65e)) + + + + +## [1.0.1](https://github.com/zkat/make-fetch-happen/compare/v1.0.0...v1.0.1) (2017-04-01) + + + + +# 1.0.0 (2017-04-01) + + +### Bug Fixes + +* **cache:** default on cache-control header ([b872a2c](https://github.com/zkat/make-fetch-happen/commit/b872a2c)) +* standard stuff and cache matching ([753f2c2](https://github.com/zkat/make-fetch-happen/commit/753f2c2)) +* **agent:** nudge around things with opts.agent 
([ed62b57](https://github.com/zkat/make-fetch-happen/commit/ed62b57)) +* **agent:** {agent: false} has special behavior ([b8cc923](https://github.com/zkat/make-fetch-happen/commit/b8cc923)) +* **cache:** invalidation on non-GET ([fe78fac](https://github.com/zkat/make-fetch-happen/commit/fe78fac)) +* **cache:** make force-cache and only-if-cached work as expected ([f50e9df](https://github.com/zkat/make-fetch-happen/commit/f50e9df)) +* **cache:** more spec compliance ([d5a56db](https://github.com/zkat/make-fetch-happen/commit/d5a56db)) +* **cache:** only cache 200 gets ([0abb25a](https://github.com/zkat/make-fetch-happen/commit/0abb25a)) +* **cache:** only load cache code if cache opt is a string ([250fcd5](https://github.com/zkat/make-fetch-happen/commit/250fcd5)) +* **cache:** oops ([e3fa15a](https://github.com/zkat/make-fetch-happen/commit/e3fa15a)) +* **cache:** refactored warning removal into main file ([5b0a9f9](https://github.com/zkat/make-fetch-happen/commit/5b0a9f9)) +* **cache:** req constructor no longer needed in Cache ([5b74cbc](https://github.com/zkat/make-fetch-happen/commit/5b74cbc)) +* **cache:** standard fetch api calls cacheMode "cache" ([6fba805](https://github.com/zkat/make-fetch-happen/commit/6fba805)) +* **cache:** was using wrong method for non-GET/HEAD cache invalidation ([810763a](https://github.com/zkat/make-fetch-happen/commit/810763a)) +* **caching:** a bunch of cache-related fixes ([8ebda1d](https://github.com/zkat/make-fetch-happen/commit/8ebda1d)) +* **deps:** `cacache[@6](https://github.com/6).3.0` - race condition fixes ([9528442](https://github.com/zkat/make-fetch-happen/commit/9528442)) +* **freshness:** fix regex for cacheControl matching ([070db86](https://github.com/zkat/make-fetch-happen/commit/070db86)) +* **freshness:** fixed default freshness heuristic value ([5d29e88](https://github.com/zkat/make-fetch-happen/commit/5d29e88)) +* **logging:** remove console.log calls ([a1d0a47](https://github.com/zkat/make-fetch-happen/commit/a1d0a47)) +* **method:** node-fetch guarantees uppercase ([a1d68d6](https://github.com/zkat/make-fetch-happen/commit/a1d68d6)) +* **opts:** simplified opts handling ([516fd6e](https://github.com/zkat/make-fetch-happen/commit/516fd6e)) +* **proxy:** pass proxy option directly to ProxyAgent ([3398460](https://github.com/zkat/make-fetch-happen/commit/3398460)) +* **retry:** false -> {retries: 0} ([297fbb6](https://github.com/zkat/make-fetch-happen/commit/297fbb6)) +* **retry:** only retry put if body is not a stream ([a24e599](https://github.com/zkat/make-fetch-happen/commit/a24e599)) +* **retry:** skip retries if body is a stream for ANY method ([780c0f8](https://github.com/zkat/make-fetch-happen/commit/780c0f8)) + + +### Features + +* **api:** initial implementation -- can make and cache requests ([7d55b49](https://github.com/zkat/make-fetch-happen/commit/7d55b49)) +* **fetch:** injectable cache, and retry support ([87b84bf](https://github.com/zkat/make-fetch-happen/commit/87b84bf)) + + +### BREAKING CHANGES + +* **cache:** opts.cache -> opts.cacheManager; opts.cacheMode -> opts.cache +* **fetch:** opts.cache accepts a Cache-like obj or a path. Requests are now retried. 
+* **api:** actual api implemented diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/README.md b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/README.md new file mode 100644 index 0000000000000..4d12d8dae7e31 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/README.md @@ -0,0 +1,404 @@ +# make-fetch-happen [![npm version](https://img.shields.io/npm/v/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![license](https://img.shields.io/npm/l/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![Travis](https://img.shields.io/travis/zkat/make-fetch-happen.svg)](https://travis-ci.org/zkat/make-fetch-happen) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/make-fetch-happen?svg=true)](https://ci.appveyor.com/project/zkat/make-fetch-happen) [![Coverage Status](https://coveralls.io/repos/github/zkat/make-fetch-happen/badge.svg?branch=latest)](https://coveralls.io/github/zkat/make-fetch-happen?branch=latest) + + +[`make-fetch-happen`](https://github.com/zkat/make-fetch-happen) is a Node.js +library that wraps [`node-fetch-npm`](https://github.com/npm/node-fetch-npm) with additional +features [`node-fetch`](https://github.com/bitinn/node-fetch) doesn't intend to include, including HTTP Cache support, request +pooling, proxies, retries, [and more](#features)! 
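A combined sketch of several of the features just listed, ahead of the full docs below (all values illustrative):

```javascript
const fetch = require('make-fetch-happen').defaults({
  cacheManager: './my-cache',             // HTTP cache storage target
  retry: { retries: 3, randomize: true }, // tuned retry settings
  proxy: process.env.HTTPS_PROXY,         // optional proxy, if configured
  noProxy: 'internal.example.com'         // ...except for this domain
})

fetch('https://registry.npmjs.org/make-fetch-happen')
  .then(res => res.json())
  .then(body => console.log(body.name))
```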
+ +## Install + +`$ npm install --save make-fetch-happen` + +## Table of Contents + +* [Example](#example) +* [Features](#features) +* [Contributing](#contributing) +* [API](#api) + * [`fetch`](#fetch) + * [`fetch.defaults`](#fetch-defaults) + * [`node-fetch` options](#node-fetch-options) + * [`make-fetch-happen` options](#extra-options) + * [`opts.cacheManager`](#opts-cache-manager) + * [`opts.cache`](#opts-cache) + * [`opts.proxy`](#opts-proxy) + * [`opts.noProxy`](#opts-no-proxy) + * [`opts.ca, opts.cert, opts.key`](#https-opts) + * [`opts.maxSockets`](#opts-max-sockets) + * [`opts.retry`](#opts-retry) + * [`opts.onRetry`](#opts-onretry) + * [`opts.integrity`](#opts-integrity) +* [Message From Our Sponsors](#wow) + +### Example + +```javascript +const fetch = require('make-fetch-happen').defaults({ + cacheManager: './my-cache' // path where cache will be written (and read) +}) + +fetch('https://registry.npmjs.org/make-fetch-happen').then(res => { + return res.json() // download the body as JSON +}).then(body => { + console.log(`got ${body.name} from web`) + return fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'no-cache' // forces a conditional request + }) +}).then(res => { + console.log(res.status) // 304! cache validated! + return res.json().then(body => { + console.log(`got ${body.name} from cache`) + }) +}) +``` + +### Features + +* Builds around [`node-fetch`](https://npm.im/node-fetch) for the core [`fetch` API](https://fetch.spec.whatwg.org) implementation +* Request pooling out of the box +* Quite fast, really +* Automatic HTTP-semantics-aware request retries +* Cache-fallback automatic "offline mode" +* Proxy support (http, https, socks, socks4, socks5) +* Built-in request caching following full HTTP caching rules (`Cache-Control`, `ETag`, `304`s, cache fallback on error, etc). +* Customize cache storage with any [Cache API](https://developer.mozilla.org/en-US/docs/Web/API/Cache)-compliant `Cache` instance. Cache to Redis! +* Node.js Stream support +* Transparent gzip and deflate support +* [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) support +* Literally punches nazis +* (PENDING) Range request caching and resuming + +### Contributing + +The make-fetch-happen team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. + +All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. + +Please refer to the [Changelog](CHANGELOG.md) for project history details, too. + +Happy hacking! + +### API + +#### `> fetch(uriOrRequest, [opts]) -> Promise` + +This function implements most of the [`fetch` API](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch): given a `uri` string or a `Request` instance, it will fire off an http request and return a Promise containing the relevant response. + +If `opts` is provided, the [`node-fetch`-specific options](#node-fetch-options) will be passed to that library. 
There are also [additional options](#extra-options) specific to make-fetch-happen that add various features, such as HTTP caching, integrity verification, proxy support, and more. + +##### Example + +```javascript +fetch('https://google.com').then(res => res.buffer()) +``` + +#### `> fetch.defaults([defaultUrl], [defaultOpts])` + +Returns a new `fetch` function that will call `make-fetch-happen` using `defaultUrl` and `defaultOpts` as default values to any calls. + +A defaulted `fetch` will also have a `.defaults()` method, so they can be chained. + +##### Example + +```javascript +const fetch = require('make-fetch-happen').defaults({ + cacheManager: './my-local-cache' +}) + +fetch('https://registry.npmjs.org/make-fetch-happen') // will always use the cache +``` + +#### `> node-fetch options` + +The following options for `node-fetch` are used as-is: + +* method +* body +* redirect +* follow +* timeout +* compress +* size + +These other options are modified or augmented by make-fetch-happen: + +* headers - Default `User-Agent` set to make-fetch happen. `Connection` is set to `keep-alive` or `close` automatically depending on `opts.agent`. +* agent + * If agent is null, an http or https Agent will be automatically used. By default, these will be `http.globalAgent` and `https.globalAgent`. + * If [`opts.proxy`](#opts-proxy) is provided and `opts.agent` is null, the agent will be set to an appropriate proxy-handling agent. + * If `opts.agent` is an object, it will be used as the request-pooling agent argument for this request. + * If `opts.agent` is `false`, it will be passed as-is to the underlying request library. This causes a new Agent to be spawned for every request. + +For more details, see [the documentation for `node-fetch` itself](https://github.com/bitinn/node-fetch#options). + +#### `> make-fetch-happen options` + +make-fetch-happen augments the `node-fetch` API with additional features available through extra options. The following extra options are available: + +* [`opts.cacheManager`](#opts-cache-manager) - Cache target to read/write +* [`opts.cache`](#opts-cache) - `fetch` cache mode. Controls cache *behavior*. +* [`opts.proxy`](#opts-proxy) - Proxy agent +* [`opts.noProxy`](#opts-no-proxy) - Domain segments to disable proxying for. +* [`opts.ca, opts.cert, opts.key, opts.strictSSL`](#https-opts) +* [`opts.localAddress`](#opts-local-address) +* [`opts.maxSockets`](#opts-max-sockets) +* [`opts.retry`](#opts-retry) - Request retry settings +* [`opts.onRetry`](#opts-onretry) - a function called whenever a retry is attempted +* [`opts.integrity`](#opts-integrity) - [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. + +#### `> opts.cacheManager` + +Either a `String` or a `Cache`. If the former, it will be assumed to be a `Path` to be used as the cache root for [`cacache`](https://npm.im/cacache). + +If an object is provided, it will be assumed to be a compliant [`Cache` instance](https://developer.mozilla.org/en-US/docs/Web/API/Cache). Only `Cache.match()`, `Cache.put()`, and `Cache.delete()` are required. Options objects will not be passed in to `match()` or `delete()`. + +By implementing this API, you can customize the storage backend for make-fetch-happen itself -- for example, you could implement a cache that uses `redis` for caching, or simply keeps everything in memory. 
Most of the caching logic exists entirely on the make-fetch-happen side, so the only thing you need to worry about is reading, writing, and deleting, as well as making sure `fetch.Response` objects are what gets returned. + +You can refer to `cache.js` in the make-fetch-happen source code for a reference implementation. + +**NOTE**: Requests will not be cached unless their response bodies are consumed. You will need to use one of the `res.json()`, `res.buffer()`, etc methods on the response, or drain the `res.body` stream, in order for it to be written. + +The default cache manager also adds the following headers to cached responses: + +* `X-Local-Cache`: Path to the cache the content was found in +* `X-Local-Cache-Key`: Unique cache entry key for this response +* `X-Local-Cache-Hash`: Specific integrity hash for the cached entry +* `X-Local-Cache-Time`: UTCString of the cache insertion time for the entry + +Using [`cacache`](https://npm.im/cacache), a call like this may be used to +manually fetch the cached entry: + +```javascript +const h = response.headers +cacache.get(h.get('x-local-cache'), h.get('x-local-cache-key')) + +// grab content only, directly: +cacache.get.byDigest(h.get('x-local-cache'), h.get('x-local-cache-hash')) +``` + +##### Example + +```javascript +fetch('https://registry.npmjs.org/make-fetch-happen', { + cacheManager: './my-local-cache' +}) // -> 200-level response will be written to disk + +fetch('https://npm.im/cacache', { + cacheManager: new MyCustomRedisCache(process.env.PORT) +}) // -> 200-level response will be written to redis +``` + +A possible (minimal) implementation for `MyCustomRedisCache`: + +```javascript +const bluebird = require('bluebird') +const redis = require("redis") +bluebird.promisifyAll(redis.RedisClient.prototype) +class MyCustomRedisCache { + constructor (opts) { + this.redis = redis.createClient(opts) + } + match (req) { + return this.redis.getAsync(req.url).then(res => { + if (res) { + const parsed = JSON.parse(res) + return new fetch.Response(parsed.body, { + url: req.url, + headers: parsed.headers, + status: 200 + }) + } + }) + } + put (req, res) { + return res.buffer().then(body => { + return this.redis.setAsync(req.url, JSON.stringify({ + body: body, + headers: res.headers.raw() + })) + }).then(() => { + // return the response itself + return res + }) + } + 'delete' (req) { + return this.redis.unlinkAsync(req.url) + } +} +``` + +#### `> opts.cache` + +This option follows the standard `fetch` API cache option. This option will do nothing if [`opts.cacheManager`](#opts-cache-manager) is null. The following values are accepted (as strings): + +* `default` - Fetch will inspect the HTTP cache on the way to the network. If there is a fresh response it will be used. If there is a stale response a conditional request will be created, and a normal request otherwise. It then updates the HTTP cache with the response. If the revalidation request fails (for example, on a 500 or if you're offline), the stale response will be returned. +* `no-store` - Fetch behaves as if there is no HTTP cache at all. +* `reload` - Fetch behaves as if there is no HTTP cache on the way to the network. Ergo, it creates a normal request and updates the HTTP cache with the response. +* `no-cache` - Fetch creates a conditional request if there is a response in the HTTP cache and a normal request otherwise. It then updates the HTTP cache with the response. +* `force-cache` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. 
If there was no response, it creates a normal request and updates the HTTP cache with the response. +* `only-if-cached` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it returns a network error. (Can only be used when request’s mode is "same-origin". Any cached redirects will be followed assuming request’s redirect mode is "follow" and the redirects do not violate request’s mode.) + +(Note: option descriptions are taken from https://fetch.spec.whatwg.org/#http-network-or-cache-fetch) + +##### Example + +```javascript +const fetch = require('make-fetch-happen').defaults({ + cacheManager: './my-cache' +}) + +// Will error with ENOTCACHED if we haven't already cached this url +fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'only-if-cached' +}) + +// Will refresh any local content and cache the new response +fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'reload' +}) + +// Will use any local data, even if stale. Otherwise, will hit network. +fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'force-cache' +}) +``` + +#### `> opts.proxy` + +A string or `url.parse`-d URI to proxy through. Different Proxy handlers will be +used depending on the proxy's protocol. + +Additionally, `process.env.HTTP_PROXY`, `process.env.HTTPS_PROXY`, and +`process.env.PROXY` are used if present and no `opts.proxy` value is provided. + +(Pending) `process.env.NO_PROXY` may also be configured to skip proxying requests for all, or specific domains. + +##### Example + +```javascript +fetch('https://registry.npmjs.org/make-fetch-happen', { + proxy: 'https://corporate.yourcompany.proxy:4445' +}) + +fetch('https://registry.npmjs.org/make-fetch-happen', { + proxy: { + protocol: 'https:', + hostname: 'corporate.yourcompany.proxy', + port: 4445 + } +}) +``` + +#### `> opts.noProxy` + +If present, should be a comma-separated string or an array of domain extensions +that a proxy should _not_ be used for. + +This option may also be provided through `process.env.NO_PROXY`. + +#### `> opts.ca, opts.cert, opts.key, opts.strictSSL` + +These values are passed in directly to the HTTPS agent and will be used for both +proxied and unproxied outgoing HTTPS requests. They mostly correspond to the +same options the `https` module accepts, which will be themselves passed to +`tls.connect()`. `opts.strictSSL` corresponds to `rejectUnauthorized`. + +#### `> opts.localAddress` + +Passed directly to `http` and `https` request calls. Determines the local +address to bind to. + +#### `> opts.maxSockets` + +Default: 15 + +Maximum number of active concurrent sockets to use for the underlying +Http/Https/Proxy agents. This setting applies once per spawned agent. + +15 is probably a _pretty good value_ for most use-cases, and balances speed +with, uh, not knocking out people's routers. 🤓 + +#### `> opts.retry` + +An object that can be used to tune request retry settings. Retries will only be attempted on the following conditions: + +* Request method is NOT `POST` AND +* Request status is one of: `408`, `420`, `429`, or any status in the 500-range. OR +* Request errored with `ECONNRESET`, `ECONNREFUSED`, `EADDRINUSE`, `ETIMEDOUT`, or the `fetch` error `request-timeout`. + +The following are worth noting as explicitly not retried: + +* `getaddrinfo ENOTFOUND` and will be assumed to be either an unreachable domain or the user will be assumed offline. If a response is cached, it will be returned immediately. 
+* `ECONNRESET` currently has no support for restarting. It will eventually be supported but requires a bit more juggling due to streaming. + +If `opts.retry` is `false`, it is equivalent to `{retries: 0}` + +If `opts.retry` is a number, it is equivalent to `{retries: num}` + +The following retry options are available if you want more control over it: + +* retries +* factor +* minTimeout +* maxTimeout +* randomize + +For details on what each of these do, refer to the [`retry`](https://npm.im/retry) documentation. + +##### Example + +```javascript +fetch('https://flaky.site.com', { + retry: { + retries: 10, + randomize: true + } +}) + +fetch('http://reliable.site.com', { + retry: false +}) + +fetch('http://one-more.site.com', { + retry: 3 +}) +``` + +#### `> opts.onRetry` + +A function called whenever a retry is attempted. + +##### Example + +```javascript +fetch('https://flaky.site.com', { + onRetry() { + console.log('we will retry!') + } +}) +``` + +#### `> opts.integrity` + +Matches the response body against the given [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. If verification fails, the request will fail with an `EINTEGRITY` error. + +`integrity` may either be a string or an [`ssri`](https://npm.im/ssri) `Integrity`-like. + +##### Example + +```javascript +fetch('https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', { + integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q=' +}) // -> ok + +fetch('https://malicious-registry.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', { + integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q=' +}) // Error: EINTEGRITY +``` + +### Message From Our Sponsors + +![](stop.gif) + +![](happening.gif) diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/agent.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/agent.js new file mode 100644 index 0000000000000..55675946ad97d --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/agent.js @@ -0,0 +1,171 @@ +'use strict' +const LRU = require('lru-cache') +const url = require('url') + +let AGENT_CACHE = new LRU({ max: 50 }) +let HttpsAgent +let HttpAgent + +module.exports = getAgent + +function getAgent (uri, opts) { + const parsedUri = url.parse(typeof uri === 'string' ? uri : uri.url) + const isHttps = parsedUri.protocol === 'https:' + const pxuri = getProxyUri(uri, opts) + + const key = [ + `https:${isHttps}`, + pxuri + ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` + : '>no-proxy<', + `local-address:${opts.localAddress || '>no-local-address<'}`, + `strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`, + `ca:${(isHttps && opts.ca) || '>no-ca<'}`, + `cert:${(isHttps && opts.cert) || '>no-cert<'}`, + `key:${(isHttps && opts.key) || '>no-key<'}` + ].join(':') + + if (opts.agent != null) { // `agent: false` has special behavior! + return opts.agent + } + + if (AGENT_CACHE.peek(key)) { + return AGENT_CACHE.get(key) + } + + if (pxuri) { + const proxy = getProxy(pxuri, opts, isHttps) + AGENT_CACHE.set(key, proxy) + return proxy + } + + if (isHttps && !HttpsAgent) { + HttpsAgent = require('agentkeepalive').HttpsAgent + } else if (!isHttps && !HttpAgent) { + HttpAgent = require('agentkeepalive') + } + + // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout + // of zero disables the timeout behavior (OS limits still apply). 
Else, if + // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that + // the node-fetch-npm timeout will always fire first, giving us more + // consistent errors. + const agentTimeout = opts.timeout === 0 ? 0 : opts.timeout + 1 + + const agent = isHttps ? new HttpsAgent({ + maxSockets: opts.maxSockets || 15, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + localAddress: opts.localAddress, + rejectUnauthorized: opts.strictSSL, + timeout: agentTimeout + }) : new HttpAgent({ + maxSockets: opts.maxSockets || 15, + localAddress: opts.localAddress, + timeout: agentTimeout + }) + AGENT_CACHE.set(key, agent) + return agent +} + +function checkNoProxy (uri, opts) { + const host = url.parse(uri).hostname.split('.').reverse() + let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) + if (typeof noproxy === 'string') { + noproxy = noproxy.split(/\s*,\s*/g) + } + return noproxy && noproxy.some(no => { + const noParts = no.split('.').filter(x => x).reverse() + if (!noParts.length) { return false } + for (let i = 0; i < noParts.length; i++) { + if (host[i] !== noParts[i]) { + return false + } + } + return true + }) +} + +module.exports.getProcessEnv = getProcessEnv + +function getProcessEnv (env) { + if (!env) { return } + + let value + + if (Array.isArray(env)) { + for (let e of env) { + value = process.env[e] || + process.env[e.toUpperCase()] || + process.env[e.toLowerCase()] + if (typeof value !== 'undefined') { break } + } + } + + if (typeof env === 'string') { + value = process.env[env] || + process.env[env.toUpperCase()] || + process.env[env.toLowerCase()] + } + + return value +} + +function getProxyUri (uri, opts) { + const protocol = url.parse(uri).protocol + + const proxy = opts.proxy || ( + protocol === 'https:' && getProcessEnv('https_proxy') + ) || ( + protocol === 'http:' && getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) + ) + if (!proxy) { return null } + + const parsedProxy = (typeof proxy === 'string') ? url.parse(proxy) : proxy + + return !checkNoProxy(uri, opts) && parsedProxy +} + +let HttpProxyAgent +let HttpsProxyAgent +let SocksProxyAgent +function getProxy (proxyUrl, opts, isHttps) { + let popts = { + host: proxyUrl.hostname, + port: proxyUrl.port, + protocol: proxyUrl.protocol, + path: proxyUrl.path, + auth: proxyUrl.auth, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + timeout: opts.timeout === 0 ? 
0 : opts.timeout + 1, + localAddress: opts.localAddress, + maxSockets: opts.maxSockets || 15, + rejectUnauthorized: opts.strictSSL + } + + if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { + if (!isHttps) { + if (!HttpProxyAgent) { + HttpProxyAgent = require('http-proxy-agent') + } + + return new HttpProxyAgent(popts) + } else { + if (!HttpsProxyAgent) { + HttpsProxyAgent = require('https-proxy-agent') + } + + return new HttpsProxyAgent(popts) + } + } + if (proxyUrl.protocol.startsWith('socks')) { + if (!SocksProxyAgent) { + SocksProxyAgent = require('socks-proxy-agent') + } + + return new SocksProxyAgent(popts) + } +} diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/cache.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/cache.js new file mode 100644 index 0000000000000..0c519e69fbe81 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/cache.js @@ -0,0 +1,249 @@ +'use strict' + +const cacache = require('cacache') +const fetch = require('node-fetch-npm') +const pipe = require('mississippi').pipe +const ssri = require('ssri') +const through = require('mississippi').through +const to = require('mississippi').to +const url = require('url') +const stream = require('stream') + +const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB + +function cacheKey (req) { + const parsed = url.parse(req.url) + return `make-fetch-happen:request-cache:${ + url.format({ + protocol: parsed.protocol, + slashes: parsed.slashes, + host: parsed.host, + hostname: parsed.hostname, + pathname: parsed.pathname + }) + }` +} + +// This is a cacache-based implementation of the Cache standard, +// using node-fetch. +// docs: https://developer.mozilla.org/en-US/docs/Web/API/Cache +// +module.exports = class Cache { + constructor (path, opts) { + this._path = path + this._uid = opts && opts.uid + this._gid = opts && opts.gid + this.Promise = (opts && opts.Promise) || Promise + } + + // Returns a Promise that resolves to the response associated with the first + // matching request in the Cache object. + match (req, opts) { + opts = opts || {} + const key = cacheKey(req) + return cacache.get.info(this._path, key).then(info => { + return info && cacache.get.hasContent( + this._path, info.integrity, opts + ).then(exists => exists && info) + }).then(info => { + if (info && info.metadata && matchDetails(req, { + url: info.metadata.url, + reqHeaders: new fetch.Headers(info.metadata.reqHeaders), + resHeaders: new fetch.Headers(info.metadata.resHeaders), + cacheIntegrity: info.integrity, + integrity: opts && opts.integrity + })) { + const resHeaders = new fetch.Headers(info.metadata.resHeaders) + addCacheHeaders(resHeaders, this._path, key, info.integrity, info.time) + if (req.method === 'HEAD') { + return new fetch.Response(null, { + url: req.url, + headers: resHeaders, + status: 200 + }) + } + let body + const cachePath = this._path + // avoid opening cache file handles until a user actually tries to + // read from it. 
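+ // Unless memoization is disabled, entries larger than MAX_MEM_SIZE get a + // lazy PassThrough that only pipes in the cacache read stream on the first + // _read() call; everything else falls through to a single bulk read below, + // which cacache serves faster.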
+ if (opts.memoize !== false && info.size > MAX_MEM_SIZE) { + body = new stream.PassThrough() + const realRead = body._read + body._read = function (size) { + body._read = realRead + pipe( + cacache.get.stream.byDigest(cachePath, info.integrity, { + memoize: opts.memoize + }), + body, + err => body.emit('error', err)) + return realRead.call(this, size) + } + } else { + let readOnce = false + // cacache is much faster at bulk reads + body = new stream.Readable({ + read () { + if (readOnce) return this.push(null) + readOnce = true + cacache.get.byDigest(cachePath, info.integrity, { + memoize: opts.memoize + }).then(data => { + this.push(data) + this.push(null) + }, err => this.emit('error', err)) + } + }) + } + return this.Promise.resolve(new fetch.Response(body, { + url: req.url, + headers: resHeaders, + status: 200, + size: info.size + })) + } + }) + } + + // Takes both a request and its response and adds it to the given cache. + put (req, response, opts) { + opts = opts || {} + const size = response.headers.get('content-length') + const fitInMemory = !!size && opts.memoize !== false && size < MAX_MEM_SIZE + const ckey = cacheKey(req) + const cacheOpts = { + algorithms: opts.algorithms, + metadata: { + url: req.url, + reqHeaders: req.headers.raw(), + resHeaders: response.headers.raw() + }, + uid: this._uid, + gid: this._gid, + size, + memoize: fitInMemory && opts.memoize + } + if (req.method === 'HEAD' || response.status === 304) { + // Update metadata without writing + return cacache.get.info(this._path, ckey).then(info => { + // Providing these will bypass content write + cacheOpts.integrity = info.integrity + addCacheHeaders( + response.headers, this._path, ckey, info.integrity, info.time + ) + return new this.Promise((resolve, reject) => { + pipe( + cacache.get.stream.byDigest(this._path, info.integrity, cacheOpts), + cacache.put.stream(this._path, cacheKey(req), cacheOpts), + err => err ? reject(err) : resolve(response) + ) + }) + }).then(() => response) + } + let buf = [] + let bufSize = 0 + let cacheTargetStream = false + const cachePath = this._path + let cacheStream = to((chunk, enc, cb) => { + if (!cacheTargetStream) { + if (fitInMemory) { + cacheTargetStream = + to({highWaterMark: MAX_MEM_SIZE}, (chunk, enc, cb) => { + buf.push(chunk) + bufSize += chunk.length + cb() + }, done => { + cacache.put( + cachePath, + cacheKey(req), + Buffer.concat(buf, bufSize), + cacheOpts + ).then( + () => done(), + done + ) + }) + } else { + cacheTargetStream = + cacache.put.stream(cachePath, cacheKey(req), cacheOpts) + } + } + cacheTargetStream.write(chunk, enc, cb) + }, done => { + cacheTargetStream ? cacheTargetStream.end(done) : done() + }) + const oldBody = response.body + const newBody = through({highWaterMark: fitInMemory && MAX_MEM_SIZE}) + response.body = newBody + oldBody.once('error', err => newBody.emit('error', err)) + newBody.once('error', err => oldBody.emit('error', err)) + cacheStream.once('error', err => newBody.emit('error', err)) + pipe(oldBody, to((chunk, enc, cb) => { + cacheStream.write(chunk, enc, () => { + newBody.write(chunk, enc, cb) + }) + }, done => { + cacheStream.end(() => { + newBody.end(() => { + done() + }) + }) + }), err => err && newBody.emit('error', err)) + return response + } + + // Finds the Cache entry whose key is the request, and if found, deletes the + // Cache entry and returns a Promise that resolves to true. If no Cache entry + // is found, it returns false.
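+ // (If opts.memoize was given as a cache-like object, its contents are + // cleared as well, via reset() or clear() when available.)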
+ 'delete' (req, opts) { + opts = opts || {} + if (typeof opts.memoize === 'object') { + if (opts.memoize.reset) { + opts.memoize.reset() + } else if (opts.memoize.clear) { + opts.memoize.clear() + } else { + Object.keys(opts.memoize).forEach(k => { + opts.memoize[k] = null + }) + } + } + return cacache.rm.entry( + this._path, + cacheKey(req) + // TODO - true/false + ).then(() => false) + } +} + +function matchDetails (req, cached) { + const reqUrl = url.parse(req.url) + const cacheUrl = url.parse(cached.url) + const vary = cached.resHeaders.get('Vary') + // https://tools.ietf.org/html/rfc7234#section-4.1 + if (vary) { + if (vary.match(/\*/)) { + return false + } else { + const fieldsMatch = vary.split(/\s*,\s*/).every(field => { + return cached.reqHeaders.get(field) === req.headers.get(field) + }) + if (!fieldsMatch) { + return false + } + } + } + if (cached.integrity) { + return ssri.parse(cached.integrity).match(cached.cacheIntegrity) + } + reqUrl.hash = null + cacheUrl.hash = null + return url.format(reqUrl) === url.format(cacheUrl) +} + +function addCacheHeaders (resHeaders, path, key, hash, time) { + resHeaders.set('X-Local-Cache', encodeURIComponent(path)) + resHeaders.set('X-Local-Cache-Key', encodeURIComponent(key)) + resHeaders.set('X-Local-Cache-Hash', encodeURIComponent(hash)) + resHeaders.set('X-Local-Cache-Time', new Date(time).toUTCString()) +} diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/index.js new file mode 100644 index 0000000000000..0f2c164e19f28 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/index.js @@ -0,0 +1,482 @@ +'use strict' + +let Cache +const url = require('url') +const CachePolicy = require('http-cache-semantics') +const fetch = require('node-fetch-npm') +const pkg = require('./package.json') +const retry = require('promise-retry') +let ssri +const Stream = require('stream') +const getAgent = require('./agent') +const setWarning = require('./warning') + +const isURL = /^https?:/ +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT' // someone in the transaction is WAY TOO SLOW + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. 
Either bad hostname, or offline) +] + +const RETRY_TYPES = [ + 'request-timeout' +] + +// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch +module.exports = cachingFetch +cachingFetch.defaults = function (_uri, _opts) { + const fetch = this + if (typeof _uri === 'object') { + _opts = _uri + _uri = null + } + + function defaultedFetch (uri, opts) { + const finalOpts = Object.assign({}, _opts || {}, opts || {}) + return fetch(uri || _uri, finalOpts) + } + + defaultedFetch.defaults = fetch.defaults + defaultedFetch.delete = fetch.delete + return defaultedFetch +} + +cachingFetch.delete = cacheDelete +function cacheDelete (uri, opts) { + opts = configureOptions(opts) + if (opts.cacheManager) { + const req = new fetch.Request(uri, { + method: opts.method, + headers: opts.headers + }) + return opts.cacheManager.delete(req, opts) + } +} + +function initializeCache (opts) { + if (typeof opts.cacheManager === 'string') { + if (!Cache) { + // Default cacache-based cache + Cache = require('./cache') + } + + opts.cacheManager = new Cache(opts.cacheManager, opts) + } + + opts.cache = opts.cache || 'default' + + if (opts.cache === 'default' && isHeaderConditional(opts.headers)) { + // If header list contains `If-Modified-Since`, `If-None-Match`, + // `If-Unmodified-Since`, `If-Match`, or `If-Range`, fetch will set cache + // mode to "no-store" if it is "default". + opts.cache = 'no-store' + } +} + +function configureOptions (_opts) { + const opts = Object.assign({}, _opts || {}) + opts.method = (opts.method || 'GET').toUpperCase() + + if (opts.retry && typeof opts.retry === 'number') { + opts.retry = { retries: opts.retry } + } + + if (opts.retry === false) { + opts.retry = { retries: 0 } + } + + if (opts.cacheManager) { + initializeCache(opts) + } + + return opts +} + +function initializeSsri () { + if (!ssri) { + ssri = require('ssri') + } +} + +function cachingFetch (uri, _opts) { + const opts = configureOptions(_opts) + + if (opts.integrity) { + initializeSsri() + // if verifying integrity, node-fetch must not decompress + opts.compress = false + } + + const isCachable = (opts.method === 'GET' || opts.method === 'HEAD') && + opts.cacheManager && + opts.cache !== 'no-store' && + opts.cache !== 'reload' + + if (isCachable) { + const req = new fetch.Request(uri, { + method: opts.method, + headers: opts.headers + }) + + return opts.cacheManager.match(req, opts).then(res => { + if (res) { + const warningCode = (res.headers.get('Warning') || '').match(/^\d+/) + if (warningCode && +warningCode >= 100 && +warningCode < 200) { + // https://tools.ietf.org/html/rfc7234#section-4.3.4 + // + // If a stored response is selected for update, the cache MUST: + // + // * delete any Warning header fields in the stored response with + // warn-code 1xx (see Section 5.5); + // + // * retain any Warning header fields in the stored response with + // warn-code 2xx; + // + res.headers.delete('Warning') + } + + if (opts.cache === 'default' && !isStale(req, res)) { + return res + } + + if (opts.cache === 'default' || opts.cache === 'no-cache') { + return conditionalFetch(req, res, opts) + } + + if (opts.cache === 'force-cache' || opts.cache === 'only-if-cached') { + // 112 Disconnected operation + // SHOULD be included if the cache is intentionally disconnected from + // the rest of the network for a period of time. 
+ // (https://tools.ietf.org/html/rfc2616#section-14.46) + setWarning(res, 112, 'Disconnected operation') + return res + } + } + + if (!res && opts.cache === 'only-if-cached') { + const errorMsg = `request to ${ + uri + } failed: cache mode is 'only-if-cached' but no cached response available.` + + const err = new Error(errorMsg) + err.code = 'ENOTCACHED' + throw err + } + + // Missing cache entry, or mode is default (if stale), reload, no-store + return remoteFetch(req.url, opts) + }) + } + + return remoteFetch(uri, opts) +} + +function iterableToObject (iter) { + const obj = {} + for (let k of iter.keys()) { + obj[k] = iter.get(k) + } + return obj +} + +function makePolicy (req, res) { + const _req = { + url: req.url, + method: req.method, + headers: iterableToObject(req.headers) + } + const _res = { + status: res.status, + headers: iterableToObject(res.headers) + } + + return new CachePolicy(_req, _res, { shared: false }) +} + +// https://tools.ietf.org/html/rfc7234#section-4.2 +function isStale (req, res) { + if (!res) { + return null + } + + const _req = { + url: req.url, + method: req.method, + headers: iterableToObject(req.headers) + } + + const policy = makePolicy(req, res) + + const responseTime = res.headers.get('x-local-cache-time') || + res.headers.get('date') || + 0 + + policy._responseTime = new Date(responseTime) + + const bool = !policy.satisfiesWithoutRevalidation(_req) + return bool +} + +function mustRevalidate (res) { + return (res.headers.get('cache-control') || '').match(/must-revalidate/i) +} + +function conditionalFetch (req, cachedRes, opts) { + const _req = { + url: req.url, + method: req.method, + headers: Object.assign({}, opts.headers || {}) + } + + const policy = makePolicy(req, cachedRes) + opts.headers = policy.revalidationHeaders(_req) + + return remoteFetch(req.url, opts) + .then(condRes => { + const revalidatedPolicy = policy.revalidatedPolicy(_req, { + status: condRes.status, + headers: iterableToObject(condRes.headers) + }) + + if (condRes.status >= 500 && !mustRevalidate(cachedRes)) { + // 111 Revalidation failed + // MUST be included if a cache returns a stale response because an + // attempt to revalidate the response failed, due to an inability to + // reach the server. + // (https://tools.ietf.org/html/rfc2616#section-14.46) + setWarning(cachedRes, 111, 'Revalidation failed') + return cachedRes + } + + if (condRes.status === 304) { // 304 Not Modified + condRes.body = cachedRes.body + return opts.cacheManager.put(req, condRes, opts) + .then(newRes => { + newRes.headers = new fetch.Headers(revalidatedPolicy.policy.responseHeaders()) + return newRes + }) + } + + return condRes + }) + .then(res => res) + .catch(err => { + if (mustRevalidate(cachedRes)) { + throw err + } else { + // 111 Revalidation failed + // MUST be included if a cache returns a stale response because an + // attempt to revalidate the response failed, due to an inability to + // reach the server. + // (https://tools.ietf.org/html/rfc2616#section-14.46) + setWarning(cachedRes, 111, 'Revalidation failed') + // 199 Miscellaneous warning + // The warning text MAY include arbitrary information to be presented to + // a human user, or logged. A system receiving this warning MUST NOT take + // any automated action, besides presenting the warning to the user. 
+ // (https://tools.ietf.org/html/rfc2616#section-14.46) + setWarning( + cachedRes, + 199, + `Miscellaneous Warning ${err.code}: ${err.message}` + ) + + return cachedRes + } + }) +} + +function remoteFetchHandleIntegrity (res, integrity) { + const oldBod = res.body + const newBod = ssri.integrityStream({ + integrity + }) + oldBod.pipe(newBod) + res.body = newBod + oldBod.once('error', err => { + newBod.emit('error', err) + }) + newBod.once('error', err => { + oldBod.emit('error', err) + }) +} + +function remoteFetch (uri, opts) { + const agent = getAgent(uri, opts) + const headers = Object.assign({ + 'connection': agent ? 'keep-alive' : 'close', + 'user-agent': USER_AGENT + }, opts.headers || {}) + + const reqOpts = { + agent, + body: opts.body, + compress: opts.compress, + follow: opts.follow, + headers: new fetch.Headers(headers), + method: opts.method, + redirect: 'manual', + size: opts.size, + counter: opts.counter, + timeout: opts.timeout + } + + return retry( + (retryHandler, attemptNum) => { + const req = new fetch.Request(uri, reqOpts) + return fetch(req) + .then(res => { + res.headers.set('x-fetch-attempts', attemptNum) + + if (opts.integrity) { + remoteFetchHandleIntegrity(res, opts.integrity) + } + + const isStream = req.body instanceof Stream + + if (opts.cacheManager) { + const isMethodGetHead = req.method === 'GET' || + req.method === 'HEAD' + + const isCachable = opts.cache !== 'no-store' && + isMethodGetHead && + makePolicy(req, res).storable() && + res.status === 200 // No other statuses should be stored! + + if (isCachable) { + return opts.cacheManager.put(req, res, opts) + } + + if (!isMethodGetHead) { + return opts.cacheManager.delete(req).then(() => { + if (res.status >= 500 && req.method !== 'POST' && !isStream) { + if (typeof opts.onRetry === 'function') { + opts.onRetry(res) + } + + return retryHandler(res) + } + + return res + }) + } + } + + const isRetriable = req.method !== 'POST' && + !isStream && ( + res.status === 408 || // Request Timeout + res.status === 420 || // Enhance Your Calm (usually Twitter rate-limit) + res.status === 429 || // Too Many Requests ("standard" rate-limiting) + res.status >= 500 // Assume server errors are momentary hiccups + ) + + if (isRetriable) { + if (typeof opts.onRetry === 'function') { + opts.onRetry(res) + } + + return retryHandler(res) + } + + if (!fetch.isRedirect(res.status) || opts.redirect === 'manual') { + return res + } + + // handle redirects - matches behavior of npm-fetch: https://github.com/bitinn/node-fetch + if (opts.redirect === 'error') { + const err = new Error(`redirect mode is set to error: ${uri}`) + err.code = 'ENOREDIRECT' + throw err + } + + if (!res.headers.get('location')) { + const err = new Error(`redirect location header missing at: ${uri}`) + err.code = 'EINVALIDREDIRECT' + throw err + } + + if (req.counter >= req.follow) { + const err = new Error(`maximum redirect reached at: ${uri}`) + err.code = 'EMAXREDIRECT' + throw err + } + + const resolvedUrl = url.resolve(req.url, res.headers.get('location')) + let redirectURL = url.parse(resolvedUrl) + + if (isURL.test(res.headers.get('location'))) { + redirectURL = url.parse(res.headers.get('location')) + } + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). 
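+ // (e.g. an https://registry.example.com -> https://cdn.example.net redirect + // drops the Authorization header)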
This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (url.parse(req.url).hostname !== redirectURL.hostname) { + req.headers.delete('authorization') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if (res.status === 303 || + ((res.status === 301 || res.status === 302) && req.method === 'POST')) { + opts.method = 'GET' + opts.body = null + req.headers.delete('content-length') + } + + opts.headers = {} + req.headers.forEach((value, name) => { + opts.headers[name] = value + }) + + opts.counter = ++req.counter + return cachingFetch(resolvedUrl, opts) + }) + .catch(err => { + const code = err.code === 'EPROMISERETRY' ? err.retried.code : err.code + + const isRetryError = RETRY_ERRORS.indexOf(code) === -1 && + RETRY_TYPES.indexOf(err.type) === -1 + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof opts.onRetry === 'function') { + opts.onRetry(err) + } + + return retryHandler(err) + }) + }, + opts.retry + ).catch(err => { + if (err.status >= 400) { + return err + } + + throw err + }) +} + +function isHeaderConditional (headers) { + if (!headers || typeof headers !== 'object') { + return false + } + + const modifiers = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range' + ] + + return Object.keys(headers) + .some(h => modifiers.indexOf(h.toLowerCase()) !== -1) +} diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json new file mode 100644 index 0000000000000..0f5adaa3476c9 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json @@ -0,0 +1,94 @@ +{ + "_from": "make-fetch-happen@^4.0.2", + "_id": "make-fetch-happen@4.0.2", + "_inBundle": false, + "_integrity": "sha512-YMJrAjHSb/BordlsDEcVcPyTbiJKkzqMf48N8dAJZT9Zjctrkb6Yg4TY9Sq2AwSIQJFn5qBBKVTYt3vP5FMIHA==", + "_location": "/npm-registry-fetch/make-fetch-happen", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "make-fetch-happen@^4.0.2", + "name": "make-fetch-happen", + "escapedName": "make-fetch-happen", + "rawSpec": "^4.0.2", + "saveSpec": null, + "fetchSpec": "^4.0.2" + }, + "_requiredBy": [ + "/npm-registry-fetch" + ], + "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.2.tgz", + "_shasum": "2d156b11696fb32bffbafe1ac1bc085dd6c78a79", + "_spec": "make-fetch-happen@^4.0.2", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/npm-registry-fetch", + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech" + }, + "bugs": { + "url": "https://github.com/zkat/make-fetch-happen/issues" + }, + "bundleDependencies": false, + "dependencies": { + "agentkeepalive": "^3.4.1", + "cacache": "^11.3.3", + "http-cache-semantics": "^3.8.1", + "http-proxy-agent": "^2.1.0", + "https-proxy-agent": "^2.2.1", + "lru-cache": "^5.1.1", + "mississippi": "^3.0.0", + "node-fetch-npm": "^2.0.2", + "promise-retry": "^1.1.1", + "socks-proxy-agent": "^4.0.0", + "ssri": "^6.0.0" + }, + "deprecated": false, + "description": "Opinionated, caching, retrying fetch client", + "devDependencies": { + "bluebird": "^3.5.1", + "mkdirp": "^0.5.1", + "nock": "^9.2.3", + "npmlog": "^4.1.2", + "require-inject": "^1.4.2", + "rimraf": "^2.6.2", + "safe-buffer": "^5.1.1", + "standard": "^11.0.1", + "standard-version": "^4.3.0", + "tacks": "^1.2.6", + 
"tap": "^12.7.0", + "weallbehave": "^1.0.0", + "weallcontribute": "^1.0.7" + }, + "files": [ + "*.js", + "lib" + ], + "homepage": "https://github.com/zkat/make-fetch-happen#readme", + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "license": "ISC", + "main": "index.js", + "name": "make-fetch-happen", + "repository": { + "type": "git", + "url": "git+https://github.com/zkat/make-fetch-happen.git" + }, + "scripts": { + "postrelease": "npm publish && git push --follow-tags", + "prerelease": "npm t", + "pretest": "standard", + "release": "standard-version -s", + "test": "tap --coverage --nyc-arg=--all --timeout=35 -J test/*.js", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" + }, + "version": "4.0.2" +} diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/warning.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/warning.js new file mode 100644 index 0000000000000..b8f13cf83195a --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/warning.js @@ -0,0 +1,24 @@ +const url = require('url') + +module.exports = setWarning + +function setWarning (reqOrRes, code, message, replace) { + // Warning = "Warning" ":" 1#warning-value + // warning-value = warn-code SP warn-agent SP warn-text [SP warn-date] + // warn-code = 3DIGIT + // warn-agent = ( host [ ":" port ] ) | pseudonym + // ; the name or pseudonym of the server adding + // ; the Warning header, for use in debugging + // warn-text = quoted-string + // warn-date = <"> HTTP-date <"> + // (https://tools.ietf.org/html/rfc2616#section-14.46) + const host = url.parse(reqOrRes.url).host + const jsonMessage = JSON.stringify(message) + const jsonDate = JSON.stringify(new Date().toUTCString()) + const header = replace ? 
'set' : 'append' + + reqOrRes.headers[header]( + 'Warning', + `${code} ${host} ${jsonMessage} ${jsonDate}` + ) +} diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json index 0ffa9a8b9e823..8502fb763f235 100644 --- a/node_modules/npm-registry-fetch/package.json +++ b/node_modules/npm-registry-fetch/package.json @@ -1,19 +1,31 @@ { - "_from": "npm-registry-fetch@latest", - "_id": "npm-registry-fetch@3.9.0", + "_from": "npm-registry-fetch@3.9.1", + "_id": "npm-registry-fetch@3.9.1", "_inBundle": false, - "_integrity": "sha512-srwmt8YhNajAoSAaDWndmZgx89lJwIZ1GWxOuckH4Coek4uHv5S+o/l9FLQe/awA+JwTnj4FJHldxhlXdZEBmw==", + "_integrity": "sha512-VQCEZlydXw4AwLROAXWUR7QDfe2Y8Id/vpAgp6TI1/H78a4SiQ1kQrKZALm5/zxM5n4HIi+aYb+idUAV/RuY0Q==", "_location": "/npm-registry-fetch", - "_phantomChildren": {}, + "_phantomChildren": { + "agentkeepalive": "3.4.1", + "cacache": "11.3.3", + "http-cache-semantics": "3.8.1", + "http-proxy-agent": "2.1.0", + "https-proxy-agent": "2.2.1", + "lru-cache": "5.1.1", + "mississippi": "3.0.0", + "node-fetch-npm": "2.0.2", + "promise-retry": "1.1.1", + "socks-proxy-agent": "4.0.1", + "ssri": "6.0.1" + }, "_requested": { - "type": "tag", + "type": "version", "registry": true, - "raw": "npm-registry-fetch@latest", + "raw": "npm-registry-fetch@3.9.1", "name": "npm-registry-fetch", "escapedName": "npm-registry-fetch", - "rawSpec": "latest", + "rawSpec": "3.9.1", "saveSpec": null, - "fetchSpec": "latest" + "fetchSpec": "3.9.1" }, "_requiredBy": [ "#USER", @@ -28,10 +40,10 @@ "/npm-profile", "/pacote" ], - "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-3.9.0.tgz", - "_shasum": "44d841780e2833f06accb34488f8c7450d1a6856", - "_spec": "npm-registry-fetch@latest", - "_where": "/Users/zkat/Documents/code/work/npm", + "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-3.9.1.tgz", + "_shasum": "00ff6e4e35d3f75a172b332440b53e93f4cb67de", + "_spec": "npm-registry-fetch@3.9.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Kat Marchán", "email": "kzm@sykosomatic.org" @@ -52,14 +64,14 @@ "JSONStream": "^1.3.4", "bluebird": "^3.5.1", "figgy-pudding": "^3.4.1", - "lru-cache": "^4.1.3", - "make-fetch-happen": "^4.0.1", + "lru-cache": "^5.1.1", + "make-fetch-happen": "^4.0.2", "npm-package-arg": "^6.1.0" }, "deprecated": false, "description": "Fetch-based http client for use with npm registry APIs", "devDependencies": { - "cacache": "^11.0.2", + "cacache": "^11.3.3", "get-stream": "^4.0.0", "mkdirp": "^0.5.1", "nock": "^9.4.3", @@ -98,5 +110,5 @@ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'" }, - "version": "3.9.0" + "version": "3.9.1" } diff --git a/node_modules/pacote/node_modules/lru-cache/LICENSE b/node_modules/pacote/node_modules/lru-cache/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/pacote/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/lru-cache/README.md b/node_modules/pacote/node_modules/lru-cache/README.md deleted file mode 100644 index 435dfebb7e27d..0000000000000 --- a/node_modules/pacote/node_modules/lru-cache/README.md +++ /dev/null @@ -1,166 +0,0 @@ -# lru cache - -A cache object that deletes the least-recently-used items. - -[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) - -## Installation: - -```javascript -npm install lru-cache --save -``` - -## Usage: - -```javascript -var LRU = require("lru-cache") - , options = { max: 500 - , length: function (n, key) { return n * 2 + key.length } - , dispose: function (key, n) { n.close() } - , maxAge: 1000 * 60 * 60 } - , cache = new LRU(options) - , otherCache = new LRU(50) // sets just the max size - -cache.set("key", "value") -cache.get("key") // "value" - -// non-string keys ARE fully supported -// but note that it must be THE SAME object, not -// just a JSON-equivalent object. -var someObject = { a: 1 } -cache.set(someObject, 'a value') -// Object keys are not toString()-ed -cache.set('[object Object]', 'a different value') -assert.equal(cache.get(someObject), 'a value') -// A similar object with same keys/values won't work, -// because it's a different object identity -assert.equal(cache.get({ a: 1 }), undefined) - -cache.reset() // empty the cache -``` - -If you put more stuff in it, then items will fall out. - -If you try to put an oversized thing in it, then it'll fall out right -away. - -## Options - -* `max` The maximum size of the cache, checked by applying the length - function to all values in the cache. Not setting this is kind of - silly, since that's the whole purpose of this lib, but it defaults - to `Infinity`. Setting it to a non-number or negative number will - throw a `TypeError`. Setting it to 0 makes it be `Infinity`. -* `maxAge` Maximum age in ms. Items are not pro-actively pruned out - as they age, but if you try to get an item that is too old, it'll - drop it and return undefined instead of giving it to you. - Setting this to a negative value will make everything seem old! - Setting it to a non-number will throw a `TypeError`. -* `length` Function that is used to calculate the length of stored - items. If you're storing strings or buffers, then you probably want - to do something like `function(n, key){return n.length}`. The default is - `function(){return 1}`, which is fine if you want to store `max` - like-sized things. The item is passed as the first argument, and - the key is passed as the second argumnet. -* `dispose` Function that is called on items when they are dropped - from the cache. This can be handy if you want to close file - descriptors or do other cleanup tasks when items are no longer - accessible. Called with `key, value`. 
It's called *before* - actually removing the item from the internal cache, so if you want - to immediately put it back in, you'll have to do that in a - `nextTick` or `setTimeout` callback or it won't do anything. -* `stale` By default, if you set a `maxAge`, it'll only actually pull - stale items out of the cache when you `get(key)`. (That is, it's - not pre-emptively doing a `setTimeout` or anything.) If you set - `stale:true`, it'll return the stale value before deleting it. If - you don't set this, then it'll return `undefined` when you try to - get a stale entry, as if it had already been deleted. -* `noDisposeOnSet` By default, if you set a `dispose()` method, then - it'll be called whenever a `set()` operation overwrites an existing - key. If you set this option, `dispose()` will only be called when a - key falls out of the cache, not when it is overwritten. -* `updateAgeOnGet` When using time-expiring entries with `maxAge`, - setting this to `true` will make each item's effective time update - to the current time whenever it is retrieved from cache, causing it - to not expire. (It can still fall out of cache based on recency of - use, of course.) - -## API - -* `set(key, value, maxAge)` -* `get(key) => value` - - Both of these will update the "recently used"-ness of the key. - They do what you think. `maxAge` is optional and overrides the - cache `maxAge` option if provided. - - If the key is not found, `get()` will return `undefined`. - - The key and val can be any value. - -* `peek(key)` - - Returns the key value (or `undefined` if not found) without - updating the "recently used"-ness of the key. - - (If you find yourself using this a lot, you *might* be using the - wrong sort of data structure, but there are some use cases where - it's handy.) - -* `del(key)` - - Deletes a key out of the cache. - -* `reset()` - - Clear the cache entirely, throwing away all values. - -* `has(key)` - - Check if a key is in the cache, without updating the recent-ness - or deleting it for being stale. - -* `forEach(function(value,key,cache), [thisp])` - - Just like `Array.prototype.forEach`. Iterates over all the keys - in the cache, in order of recent-ness. (Ie, more recently used - items are iterated over first.) - -* `rforEach(function(value,key,cache), [thisp])` - - The same as `cache.forEach(...)` but items are iterated over in - reverse order. (ie, less recently used items are iterated over - first.) - -* `keys()` - - Return an array of the keys in the cache. - -* `values()` - - Return an array of the values in the cache. - -* `length` - - Return total length of objects in cache taking into account - `length` options function. - -* `itemCount` - - Return total quantity of objects currently in cache. Note, that - `stale` (see options) items are returned as part of this item - count. - -* `dump()` - - Return an array of the cache entries ready for serialization and usage - with 'destinationCache.load(arr)`. - -* `load(cacheEntriesArray)` - - Loads another cache entries array, obtained with `sourceCache.dump()`, - into the cache. 
The destination cache is reset before loading new entries - -* `prune()` - - Manually iterates over the entire cache proactively pruning old entries diff --git a/node_modules/pacote/node_modules/lru-cache/index.js b/node_modules/pacote/node_modules/lru-cache/index.js deleted file mode 100644 index 573b6b85b9779..0000000000000 --- a/node_modules/pacote/node_modules/lru-cache/index.js +++ /dev/null @@ -1,334 +0,0 @@ -'use strict' - -// A linked list to keep track of recently-used-ness -const Yallist = require('yallist') - -const MAX = Symbol('max') -const LENGTH = Symbol('length') -const LENGTH_CALCULATOR = Symbol('lengthCalculator') -const ALLOW_STALE = Symbol('allowStale') -const MAX_AGE = Symbol('maxAge') -const DISPOSE = Symbol('dispose') -const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') -const LRU_LIST = Symbol('lruList') -const CACHE = Symbol('cache') -const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') - -const naiveLength = () => 1 - -// lruList is a yallist where the head is the youngest -// item, and the tail is the oldest. the list contains the Hit -// objects as the entries. -// Each Hit object has a reference to its Yallist.Node. This -// never changes. -// -// cache is a Map (or PseudoMap) that matches the keys to -// the Yallist.Node object. -class LRUCache { - constructor (options) { - if (typeof options === 'number') - options = { max: options } - - if (!options) - options = {} - - if (options.max && (typeof options.max !== 'number' || options.max < 0)) - throw new TypeError('max must be a non-negative number') - // Kind of weird to have a default max of Infinity, but oh well. - const max = this[MAX] = options.max || Infinity - - const lc = options.length || naiveLength - this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc - this[ALLOW_STALE] = options.stale || false - if (options.maxAge && typeof options.maxAge !== 'number') - throw new TypeError('maxAge must be a number') - this[MAX_AGE] = options.maxAge || 0 - this[DISPOSE] = options.dispose - this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false - this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false - this.reset() - } - - // resize the cache when the max changes. - set max (mL) { - if (typeof mL !== 'number' || mL < 0) - throw new TypeError('max must be a non-negative number') - - this[MAX] = mL || Infinity - trim(this) - } - get max () { - return this[MAX] - } - - set allowStale (allowStale) { - this[ALLOW_STALE] = !!allowStale - } - get allowStale () { - return this[ALLOW_STALE] - } - - set maxAge (mA) { - if (typeof mA !== 'number') - throw new TypeError('maxAge must be a non-negative number') - - this[MAX_AGE] = mA - trim(this) - } - get maxAge () { - return this[MAX_AGE] - } - - // resize the cache when the lengthCalculator changes. 
- set lengthCalculator (lC) { - if (typeof lC !== 'function') - lC = naiveLength - - if (lC !== this[LENGTH_CALCULATOR]) { - this[LENGTH_CALCULATOR] = lC - this[LENGTH] = 0 - this[LRU_LIST].forEach(hit => { - hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) - this[LENGTH] += hit.length - }) - } - trim(this) - } - get lengthCalculator () { return this[LENGTH_CALCULATOR] } - - get length () { return this[LENGTH] } - get itemCount () { return this[LRU_LIST].length } - - rforEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].tail; walker !== null;) { - const prev = walker.prev - forEachStep(this, fn, walker, thisp) - walker = prev - } - } - - forEach (fn, thisp) { - thisp = thisp || this - for (let walker = this[LRU_LIST].head; walker !== null;) { - const next = walker.next - forEachStep(this, fn, walker, thisp) - walker = next - } - } - - keys () { - return this[LRU_LIST].toArray().map(k => k.key) - } - - values () { - return this[LRU_LIST].toArray().map(k => k.value) - } - - reset () { - if (this[DISPOSE] && - this[LRU_LIST] && - this[LRU_LIST].length) { - this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) - } - - this[CACHE] = new Map() // hash of items by key - this[LRU_LIST] = new Yallist() // list of items in order of use recency - this[LENGTH] = 0 // length of items in the list - } - - dump () { - return this[LRU_LIST].map(hit => - isStale(this, hit) ? false : { - k: hit.key, - v: hit.value, - e: hit.now + (hit.maxAge || 0) - }).toArray().filter(h => h) - } - - dumpLru () { - return this[LRU_LIST] - } - - set (key, value, maxAge) { - maxAge = maxAge || this[MAX_AGE] - - if (maxAge && typeof maxAge !== 'number') - throw new TypeError('maxAge must be a number') - - const now = maxAge ? Date.now() : 0 - const len = this[LENGTH_CALCULATOR](value, key) - - if (this[CACHE].has(key)) { - if (len > this[MAX]) { - del(this, this[CACHE].get(key)) - return false - } - - const node = this[CACHE].get(key) - const item = node.value - - // dispose of the old one before overwriting - // split out into 2 ifs for better coverage tracking - if (this[DISPOSE]) { - if (!this[NO_DISPOSE_ON_SET]) - this[DISPOSE](key, item.value) - } - - item.now = now - item.maxAge = maxAge - item.value = value - this[LENGTH] += len - item.length - item.length = len - this.get(key) - trim(this) - return true - } - - const hit = new Entry(key, value, len, now, maxAge) - - // oversized objects fall out of cache automatically. 
- if (hit.length > this[MAX]) { - if (this[DISPOSE]) - this[DISPOSE](key, value) - - return false - } - - this[LENGTH] += hit.length - this[LRU_LIST].unshift(hit) - this[CACHE].set(key, this[LRU_LIST].head) - trim(this) - return true - } - - has (key) { - if (!this[CACHE].has(key)) return false - const hit = this[CACHE].get(key).value - return !isStale(this, hit) - } - - get (key) { - return get(this, key, true) - } - - peek (key) { - return get(this, key, false) - } - - pop () { - const node = this[LRU_LIST].tail - if (!node) - return null - - del(this, node) - return node.value - } - - del (key) { - del(this, this[CACHE].get(key)) - } - - load (arr) { - // reset the cache - this.reset() - - const now = Date.now() - // A previous serialized cache has the most recent items first - for (let l = arr.length - 1; l >= 0; l--) { - const hit = arr[l] - const expiresAt = hit.e || 0 - if (expiresAt === 0) - // the item was created without expiration in a non aged cache - this.set(hit.k, hit.v) - else { - const maxAge = expiresAt - now - // dont add already expired items - if (maxAge > 0) { - this.set(hit.k, hit.v, maxAge) - } - } - } - } - - prune () { - this[CACHE].forEach((value, key) => get(this, key, false)) - } -} - -const get = (self, key, doUse) => { - const node = self[CACHE].get(key) - if (node) { - const hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - return undefined - } else { - if (doUse) { - if (self[UPDATE_AGE_ON_GET]) - node.value.now = Date.now() - self[LRU_LIST].unshiftNode(node) - } - } - return hit.value - } -} - -const isStale = (self, hit) => { - if (!hit || (!hit.maxAge && !self[MAX_AGE])) - return false - - const diff = Date.now() - hit.now - return hit.maxAge ? diff > hit.maxAge - : self[MAX_AGE] && (diff > self[MAX_AGE]) -} - -const trim = self => { - if (self[LENGTH] > self[MAX]) { - for (let walker = self[LRU_LIST].tail; - self[LENGTH] > self[MAX] && walker !== null;) { - // We know that we're about to delete this one, and also - // what the next least recently used key will be, so just - // go ahead and set it now. - const prev = walker.prev - del(self, walker) - walker = prev - } - } -} - -const del = (self, node) => { - if (node) { - const hit = node.value - if (self[DISPOSE]) - self[DISPOSE](hit.key, hit.value) - - self[LENGTH] -= hit.length - self[CACHE].delete(hit.key) - self[LRU_LIST].removeNode(node) - } -} - -class Entry { - constructor (key, value, length, now, maxAge) { - this.key = key - this.value = value - this.length = length - this.now = now - this.maxAge = maxAge || 0 - } -} - -const forEachStep = (self, fn, node, thisp) => { - let hit = node.value - if (isStale(self, hit)) { - del(self, node) - if (!self[ALLOW_STALE]) - hit = undefined - } - if (hit) - fn.call(thisp, hit.value, hit.key, self) -} - -module.exports = LRUCache diff --git a/node_modules/pacote/node_modules/yallist/LICENSE b/node_modules/pacote/node_modules/yallist/LICENSE deleted file mode 100644 index 19129e315fe59..0000000000000 --- a/node_modules/pacote/node_modules/yallist/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/yallist/README.md b/node_modules/pacote/node_modules/yallist/README.md deleted file mode 100644 index f586101869668..0000000000000 --- a/node_modules/pacote/node_modules/yallist/README.md +++ /dev/null @@ -1,204 +0,0 @@ -# yallist - -Yet Another Linked List - -There are many doubly-linked list implementations like it, but this -one is mine. - -For when an array would be too big, and a Map can't be iterated in -reverse order. - - -[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist) - -## basic usage - -```javascript -var yallist = require('yallist') -var myList = yallist.create([1, 2, 3]) -myList.push('foo') -myList.unshift('bar') -// of course pop() and shift() are there, too -console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo'] -myList.forEach(function (k) { - // walk the list head to tail -}) -myList.forEachReverse(function (k, index, list) { - // walk the list tail to head -}) -var myDoubledList = myList.map(function (k) { - return k + k -}) -// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo'] -// mapReverse is also a thing -var myDoubledListReverse = myList.mapReverse(function (k) { - return k + k -}) // ['foofoo', 6, 4, 2, 'barbar'] - -var reduced = myList.reduce(function (set, entry) { - set += entry - return set -}, 'start') -console.log(reduced) // 'startfoo123bar' -``` - -## api - -The whole API is considered "public". - -Functions with the same name as an Array method work more or less the -same way. - -There's reverse versions of most things because that's the point. - -### Yallist - -Default export, the class that holds and manages a list. - -Call it with either a forEach-able (like an array) or a set of -arguments, to initialize the list. - -The Array-ish methods all act like you'd expect. No magic length, -though, so if you change that it won't automatically prune or add -empty spots. - -### Yallist.create(..) - -Alias for Yallist function. Some people like factories. - -#### yallist.head - -The first node in the list - -#### yallist.tail - -The last node in the list - -#### yallist.length - -The number of nodes in the list. (Change this at your peril. It is -not magic like Array length.) - -#### yallist.toArray() - -Convert the list to an array. - -#### yallist.forEach(fn, [thisp]) - -Call a function on each item in the list. - -#### yallist.forEachReverse(fn, [thisp]) - -Call a function on each item in the list, in reverse order. - -#### yallist.get(n) - -Get the data at position `n` in the list. If you use this a lot, -probably better off just using an Array. - -#### yallist.getReverse(n) - -Get the data at position `n`, counting from the tail. - -#### yallist.map(fn, thisp) - -Create a new Yallist with the result of calling the function on each -item. - -#### yallist.mapReverse(fn, thisp) - -Same as `map`, but in reverse. 
- -#### yallist.pop() - -Get the data from the list tail, and remove the tail from the list. - -#### yallist.push(item, ...) - -Insert one or more items to the tail of the list. - -#### yallist.reduce(fn, initialValue) - -Like Array.reduce. - -#### yallist.reduceReverse - -Like Array.reduce, but in reverse. - -#### yallist.reverse - -Reverse the list in place. - -#### yallist.shift() - -Get the data from the list head, and remove the head from the list. - -#### yallist.slice([from], [to]) - -Just like Array.slice, but returns a new Yallist. - -#### yallist.sliceReverse([from], [to]) - -Just like yallist.slice, but the result is returned in reverse. - -#### yallist.toArray() - -Create an array representation of the list. - -#### yallist.toArrayReverse() - -Create a reversed array representation of the list. - -#### yallist.unshift(item, ...) - -Insert one or more items to the head of the list. - -#### yallist.unshiftNode(node) - -Move a Node object to the front of the list. (That is, pull it out of -wherever it lives, and make it the new head.) - -If the node belongs to a different list, then that list will remove it -first. - -#### yallist.pushNode(node) - -Move a Node object to the end of the list. (That is, pull it out of -wherever it lives, and make it the new tail.) - -If the node belongs to a list already, then that list will remove it -first. - -#### yallist.removeNode(node) - -Remove a node from the list, preserving referential integrity of head -and tail and other nodes. - -Will throw an error if you try to have a list remove a node that -doesn't belong to it. - -### Yallist.Node - -The class that holds the data and is actually the list. - -Call with `var n = new Node(value, previousNode, nextNode)` - -Note that if you do direct operations on Nodes themselves, it's very -easy to get into weird states where the list is broken. Be careful :) - -#### node.next - -The next node in the list. - -#### node.prev - -The previous node in the list. - -#### node.value - -The data the node contains. - -#### node.list - -The list to which this node belongs. (Null if it does not belong to -any list.) 
diff --git a/node_modules/pacote/node_modules/yallist/iterator.js b/node_modules/pacote/node_modules/yallist/iterator.js deleted file mode 100644 index d41c97a19f984..0000000000000 --- a/node_modules/pacote/node_modules/yallist/iterator.js +++ /dev/null @@ -1,8 +0,0 @@ -'use strict' -module.exports = function (Yallist) { - Yallist.prototype[Symbol.iterator] = function* () { - for (let walker = this.head; walker; walker = walker.next) { - yield walker.value - } - } -} diff --git a/node_modules/pacote/node_modules/yallist/package.json b/node_modules/pacote/node_modules/yallist/package.json deleted file mode 100644 index 9dd5bf96b017c..0000000000000 --- a/node_modules/pacote/node_modules/yallist/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "_from": "yallist@^3.0.0", - "_id": "yallist@3.0.3", - "_inBundle": false, - "_integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", - "_location": "/pacote/yallist", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "yallist@^3.0.0", - "name": "yallist", - "escapedName": "yallist", - "rawSpec": "^3.0.0", - "saveSpec": null, - "fetchSpec": "^3.0.0" - }, - "_requiredBy": [ - "/pacote/minipass" - ], - "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "_shasum": "b4b049e314be545e3ce802236d6cd22cd91c3de9", - "_spec": "yallist@^3.0.0", - "_where": "/Users/zkat/Documents/code/work/npm/node_modules/pacote/node_modules/minipass", - "author": { - "name": "Isaac Z. Schlueter", - "email": "i@izs.me", - "url": "http://blog.izs.me/" - }, - "bugs": { - "url": "https://github.com/isaacs/yallist/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "Yet Another Linked List", - "devDependencies": { - "tap": "^12.1.0" - }, - "directories": { - "test": "test" - }, - "files": [ - "yallist.js", - "iterator.js" - ], - "homepage": "https://github.com/isaacs/yallist#readme", - "license": "ISC", - "main": "yallist.js", - "name": "yallist", - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/yallist.git" - }, - "scripts": { - "postpublish": "git push origin --all; git push origin --tags", - "postversion": "npm publish", - "preversion": "npm test", - "test": "tap test/*.js --100" - }, - "version": "3.0.3" -} diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index 2fd3d4151f4e3..26c7dd0b893aa 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -5,7 +5,8 @@ "_integrity": "sha512-Zqvczvf/zZ7QNosdE9uTC7SRuvSs9tFqRkF6cJl+2HH7COBnx4BRAGpeXJlrbN+mM0CMHpbi620xdEHhCflghA==", "_location": "/pacote", "_phantomChildren": { - "safe-buffer": "5.1.2" + "safe-buffer": "5.1.2", + "yallist": "3.0.3" }, "_requested": { "type": "range", diff --git a/node_modules/query-string/index.d.ts b/node_modules/query-string/index.d.ts index 90b3658aee108..a5bd661c2991d 100644 --- a/node_modules/query-string/index.d.ts +++ b/node_modules/query-string/index.d.ts @@ -1,52 +1,108 @@ export interface ParseOptions { /** - * Decode the keys and values. URI components are decoded with [`decode-uri-component`](https://github.com/SamVerschueren/decode-uri-component). - * - * @default true - */ + Decode the keys and values. URI components are decoded with [`decode-uri-component`](https://github.com/SamVerschueren/decode-uri-component). 
+ + @default true + */ readonly decode?: boolean; /** - * @default 'none' - * - * - `bracket`: Parse arrays with bracket representation: - * - * - * queryString.parse('foo[]=1&foo[]=2&foo[]=3', {arrayFormat: 'bracket'}); - * //=> foo: [1, 2, 3] - * - * - `index`: Parse arrays with index representation: - * - * - * queryString.parse('foo[0]=1&foo[1]=2&foo[3]=3', {arrayFormat: 'index'}); - * //=> foo: [1, 2, 3] - * - * - `comma`: Parse arrays with elements separated by comma: - * - * - * queryString.parse('foo=1,2,3', {arrayFormat: 'comma'}); - * //=> foo: [1, 2, 3] - * - * - `none`: Parse arrays with elements using duplicate keys: - * - * - * queryString.parse('foo=1&foo=2&foo=3'); - * //=> foo: [1, 2, 3] - */ + @default 'none' + + - `bracket`: Parse arrays with bracket representation: + + ``` + queryString.parse('foo[]=1&foo[]=2&foo[]=3', {arrayFormat: 'bracket'}); + //=> {foo: ['1', '2', '3']} + ``` + + - `index`: Parse arrays with index representation: + + ``` + queryString.parse('foo[0]=1&foo[1]=2&foo[3]=3', {arrayFormat: 'index'}); + //=> {foo: ['1', '2', '3']} + ``` + + - `comma`: Parse arrays with elements separated by comma: + + ``` + queryString.parse('foo=1,2,3', {arrayFormat: 'comma'}); + //=> {foo: ['1', '2', '3']} + ``` + + - `none`: Parse arrays with elements using duplicate keys: + + ``` + queryString.parse('foo=1&foo=2&foo=3'); + //=> {foo: ['1', '2', '3']} + ``` + */ readonly arrayFormat?: 'bracket' | 'index' | 'comma' | 'none'; + + /** + Supports both `Function` as a custom sorting function or `false` to disable sorting. + + If omitted, keys are sorted using `Array#sort`, which means, converting them to strings and comparing strings in Unicode code point order. + + @default true + + @example + ``` + const order = ['c', 'a', 'b']; + + queryString.parse('?a=one&b=two&c=three', { + sort: (itemLeft, itemRight) => order.indexOf(itemLeft) - order.indexOf(itemRight) + }); + // => {c: 'three', a: 'one', b: 'two'} + + queryString.parse('?a=one&c=three&b=two', {sort: false}); + // => {a: 'one', c: 'three', b: 'two'} + ``` + */ + readonly sort?: ((itemLeft: string, itemRight: string) => number) | false; + + /** + Parse the value as a number type instead of string type if it's a number. + + @default false + + @example + ```js + queryString.parse('foo=1', {parseNumbers: true}); + //=> {foo: 1} + ``` + */ + readonly parseNumbers?: boolean; + + /** + Parse the value as a boolean type instead of string type if it's a boolean. + + @default false + + @example + ``` + queryString.parse('foo=true', {parseBooleans: true}); + //=> {foo: true} + ``` + */ + readonly parseBooleans?: boolean; } -export interface ParsedQuery { - readonly [key: string]: string | string[] | null | undefined; +export interface ParsedQuery<T = string> { + [key: string]: T | T[] | null | undefined; } /** - * Parse a query string into an object. Leading `?` or `#` are ignored, so you can pass `location.search` or `location.hash` directly. - * - * The returned object is created with [`Object.create(null)`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/create) and thus does not have a `prototype`. - * - * @param query - The query string to parse. - */ +Parse a query string into an object. Leading `?` or `#` are ignored, so you can pass `location.search` or `location.hash` directly. + +The returned object is created with [`Object.create(null)`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/create) and thus does not have a `prototype`.
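+ +@example +``` +queryString.parse('?foo=bar'); +//=> {foo: 'bar'} +```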
+
+@param query - The query string to parse.
+*/
+export function parse(query: string, options: {parseBooleans: true, parseNumbers: true} & ParseOptions): ParsedQuery<string | boolean | number>;
+export function parse(query: string, options: {parseBooleans: true} & ParseOptions): ParsedQuery<string | boolean>;
+export function parse(query: string, options: {parseNumbers: true} & ParseOptions): ParsedQuery<string | number>;
 export function parse(query: string, options?: ParseOptions): ParsedQuery;
 
 export interface ParsedUrl {
@@ -55,89 +111,98 @@ export interface ParsedUrl {
 }
 
 /**
- * Extract the URL and the query string as an object.
- *
- * @param url - The URL to parse.
- *
- * @example
- *
- * queryString.parseUrl('https://foo.bar?foo=bar');
- * //=> {url: 'https://foo.bar', query: {foo: 'bar'}}
- */
+Extract the URL and the query string as an object.
+
+@param url - The URL to parse.
+
+@example
+```
+queryString.parseUrl('https://foo.bar?foo=bar');
+//=> {url: 'https://foo.bar', query: {foo: 'bar'}}
+```
+*/
 export function parseUrl(url: string, options?: ParseOptions): ParsedUrl;
 
 export interface StringifyOptions {
 	/**
-	 * Strictly encode URI components with [`strict-uri-encode`](https://github.com/kevva/strict-uri-encode). It uses [`encodeURIComponent`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) if set to `false`. You probably [don't care](https://github.com/sindresorhus/query-string/issues/42) about this option.
-	 *
-	 * @default true
-	 */
+	Strictly encode URI components with [`strict-uri-encode`](https://github.com/kevva/strict-uri-encode). It uses [`encodeURIComponent`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) if set to `false`. You probably [don't care](https://github.com/sindresorhus/query-string/issues/42) about this option.
+
+	@default true
+	*/
 	readonly strict?: boolean;
 
 	/**
-	 * [URL encode](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) the keys and values.
-	 *
-	 * @default true
-	 */
+	[URL encode](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) the keys and values.
+ + @default true + */ readonly encode?: boolean; /** - * @default 'none' - * - * - `bracket`: Serialize arrays using bracket representation: - * - * - * queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'bracket'}); - * //=> 'foo[]=1&foo[]=2&foo[]=3' - * - * - `index`: Serialize arrays using index representation: - * - * - * queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'index'}); - * //=> 'foo[0]=1&foo[1]=2&foo[3]=3' - * - * - `comma`: Serialize arrays by separating elements with comma: - * - * - * queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'comma'}); - * //=> 'foo=1,2,3' - * - * - `none`: Serialize arrays by using duplicate keys: - * - * - * queryString.stringify({foo: [1, 2, 3]}); - * //=> 'foo=1&foo=2&foo=3' - */ + @default 'none' + + - `bracket`: Serialize arrays using bracket representation: + + ``` + queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'bracket'}); + //=> 'foo[]=1&foo[]=2&foo[]=3' + ``` + + - `index`: Serialize arrays using index representation: + + ``` + queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'index'}); + //=> 'foo[0]=1&foo[1]=2&foo[2]=3' + ``` + + - `comma`: Serialize arrays by separating elements with comma: + + ``` + queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'comma'}); + //=> 'foo=1,2,3' + ``` + + - `none`: Serialize arrays by using duplicate keys: + + ``` + queryString.stringify({foo: [1, 2, 3]}); + //=> 'foo=1&foo=2&foo=3' + ``` + */ readonly arrayFormat?: 'bracket' | 'index' | 'comma' | 'none'; /** - * Supports both `Function` as a custom sorting function or `false` to disable sorting. - * - * If omitted, keys are sorted using `Array#sort`, which means, converting them to strings and comparing strings in Unicode code point order. - * - * @example - * - * const order = ['c', 'a', 'b']; - * queryString.stringify({a: 1, b: 2, c: 3}, { - * sort: (itemLeft, itemRight) => order.indexOf(itemLeft) - order.indexOf(itemRight) - * }); - * // => 'c=3&a=1&b=2' - * - * queryString.stringify({b: 1, c: 2, a: 3}, {sort: false}); - * // => 'b=1&c=2&a=3' - */ + Supports both `Function` as a custom sorting function or `false` to disable sorting. + + If omitted, keys are sorted using `Array#sort`, which means, converting them to strings and comparing strings in Unicode code point order. + + @default true + + @example + ``` + const order = ['c', 'a', 'b']; + + queryString.stringify({a: 1, b: 2, c: 3}, { + sort: (itemLeft, itemRight) => order.indexOf(itemLeft) - order.indexOf(itemRight) + }); + // => 'c=3&a=1&b=2' + + queryString.stringify({b: 1, c: 2, a: 3}, {sort: false}); + // => 'b=1&c=2&a=3' + ``` + */ readonly sort?: ((itemLeft: string, itemRight: string) => number) | false; } /** - * Stringify an object into a query string and sorting the keys. - */ +Stringify an object into a query string and sort the keys. +*/ export function stringify( - object: {[key: string]: unknown}, + object: {[key: string]: any}, options?: StringifyOptions ): string; /** - * Extract a query string from a URL that can be passed into `.parse()`. - */ +Extract a query string from a URL that can be passed into `.parse()`. 
+*/ export function extract(url: string): string; diff --git a/node_modules/query-string/index.js b/node_modules/query-string/index.js index 3cb1adf8d3045..da459c9404d0b 100644 --- a/node_modules/query-string/index.js +++ b/node_modules/query-string/index.js @@ -1,6 +1,7 @@ 'use strict'; const strictUriEncode = require('strict-uri-encode'); const decodeComponent = require('decode-uri-component'); +const splitOnFirst = require('split-on-first'); function encoderForArrayFormat(options) { switch (options.arrayFormat) { @@ -36,7 +37,7 @@ function encoderForArrayFormat(options) { case 'comma': return key => (result, value, index) => { - if (!value) { + if (value === null || value === undefined || value.length === 0) { return result; } @@ -151,7 +152,17 @@ function keysSorter(input) { return input; } +function removeHash(input) { + const hashStart = input.indexOf('#'); + if (hashStart !== -1) { + input = input.slice(0, hashStart); + } + + return input; +} + function extract(input) { + input = removeHash(input); const queryStart = input.indexOf('?'); if (queryStart === -1) { return ''; @@ -163,7 +174,10 @@ function extract(input) { function parse(input, options) { options = Object.assign({ decode: true, - arrayFormat: 'none' + sort: true, + arrayFormat: 'none', + parseNumbers: false, + parseBooleans: false }, options); const formatter = parserForArrayFormat(options); @@ -182,16 +196,26 @@ function parse(input, options) { } for (const param of input.split('&')) { - let [key, value] = param.replace(/\+/g, ' ').split('='); + let [key, value] = splitOnFirst(param.replace(/\+/g, ' '), '='); // Missing `=` should be `null`: // http://w3.org/TR/2012/WD-url-20120524/#collect-url-parameters value = value === undefined ? null : decode(value, options); + if (options.parseNumbers && !Number.isNaN(Number(value))) { + value = Number(value); + } else if (options.parseBooleans && value !== null && (value.toLowerCase() === 'true' || value.toLowerCase() === 'false')) { + value = value.toLowerCase() === 'true'; + } + formatter(decode(key, options), value, ret); } - return Object.keys(ret).sort().reduce((result, key) => { + if (options.sort === false) { + return ret; + } + + return (options.sort === true ? 
Object.keys(ret).sort() : Object.keys(ret).sort(options.sort)).reduce((result, key) => { const value = ret[key]; if (Boolean(value) && typeof value === 'object' && !Array.isArray(value)) { // Sort object keys, not values @@ -247,13 +271,8 @@ exports.stringify = (object, options) => { }; exports.parseUrl = (input, options) => { - const hashStart = input.indexOf('#'); - if (hashStart !== -1) { - input = input.slice(0, hashStart); - } - return { - url: input.split('?')[0] || '', + url: removeHash(input).split('?')[0] || '', query: parse(extract(input), options) }; }; diff --git a/node_modules/query-string/package.json b/node_modules/query-string/package.json index cd62b3cf54fba..916b3fe5a1714 100644 --- a/node_modules/query-string/package.json +++ b/node_modules/query-string/package.json @@ -1,28 +1,28 @@ { - "_from": "query-string@6.4.0", - "_id": "query-string@6.4.0", + "_from": "query-string@6.8.1", + "_id": "query-string@6.8.1", "_inBundle": false, - "_integrity": "sha512-Werid2I41/tJTqOGPJ3cC3vwrIh/8ZupBQbp7BSsqXzr+pTin3aMJ/EZb8UEuk7ZO3VqQFvq2qck/ihc6wqIdw==", + "_integrity": "sha512-g6y0Lbq10a5pPQpjlFuojfMfV1Pd2Jw9h75ypiYPPia3Gcq2rgkKiIwbkS6JxH7c5f5u/B/sB+d13PU+g1eu4Q==", "_location": "/query-string", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "query-string@6.4.0", + "raw": "query-string@6.8.1", "name": "query-string", "escapedName": "query-string", - "rawSpec": "6.4.0", + "rawSpec": "6.8.1", "saveSpec": null, - "fetchSpec": "6.4.0" + "fetchSpec": "6.8.1" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/query-string/-/query-string-6.4.0.tgz", - "_shasum": "1566c0cec3a2da2d82c222ed3f9e2a921dba5e6a", - "_spec": "query-string@6.4.0", - "_where": "/Users/aeschright/code/cli", + "_resolved": "https://registry.npmjs.org/query-string/-/query-string-6.8.1.tgz", + "_shasum": "62c54a7ef37d01b538c8fd56f95740c81d438a26", + "_spec": "query-string@6.8.1", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", @@ -34,15 +34,16 @@ "bundleDependencies": false, "dependencies": { "decode-uri-component": "^0.2.0", + "split-on-first": "^1.0.0", "strict-uri-encode": "^2.0.0" }, "deprecated": false, "description": "Parse and stringify URL query strings", "devDependencies": { - "ava": "^1.3.1", + "ava": "^1.4.1", "deep-equal": "^1.0.1", "fast-check": "^1.5.0", - "tsd-check": "^0.3.0", + "tsd": "^0.7.3", "xo": "^0.24.0" }, "engines": { @@ -75,7 +76,7 @@ "url": "git+https://github.com/sindresorhus/query-string.git" }, "scripts": { - "test": "xo && ava && tsd-check" + "test": "xo && ava && tsd" }, - "version": "6.4.0" + "version": "6.8.1" } diff --git a/node_modules/query-string/readme.md b/node_modules/query-string/readme.md index fda9323cdecf9..297a47d5a00e9 100644 --- a/node_modules/query-string/readme.md +++ b/node_modules/query-string/readme.md @@ -11,10 +11,6 @@ $ npm install query-string This module targets Node.js 6 or later and the latest version of Chrome, Firefox, and Safari. If you want support for older browsers, or, [if your project is using create-react-app v1](https://github.com/sindresorhus/query-string/pull/148#issuecomment-399656020), use version 5: `npm install query-string@5`. - - - - ## Usage @@ -50,7 +46,7 @@ console.log(location.search); ## API -### .parse(string, [options]) +### .parse(string, options?) Parse a query string into an object. Leading `?` or `#` are ignored, so you can pass `location.search` or `location.hash` directly. 
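Taken together, the index.js hunks above change observable `parse` behavior in two ways: `split-on-first` keeps everything after the first `=` as part of the value, and the new `parseNumbers`/`parseBooleans` flags coerce values after decoding. A brief sketch of both, with illustrative inputs only (behavior as implemented in the hunks above):

```js
const queryString = require('query-string');

// Splitting on the first '=' preserves '=' inside values
// (e.g. base64 payloads); a plain .split('=') would truncate them.
queryString.parse('token=abc=');
//=> {token: 'abc='}

// Opt-in coercion: numbers are tried first, then booleans,
// otherwise the decoded string is kept as-is.
queryString.parse('page=2&strict=true', {parseNumbers: true, parseBooleans: true});
//=> {page: 2, strict: true}
```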
@@ -58,7 +54,7 @@ The returned object is created with [`Object.create(null)`](https://developer.mo #### options -Type: `Object` +Type: `object` ##### decode @@ -70,43 +66,74 @@ Decode the keys and values. URL components are decoded with [`decode-uri-compone ##### arrayFormat Type: `string`
-Default: `none` +Default: `'none'` -- `bracket`: Parse arrays with bracket representation: +- `'bracket'`: Parse arrays with bracket representation: ```js queryString.parse('foo[]=1&foo[]=2&foo[]=3', {arrayFormat: 'bracket'}); -//=> foo: [1, 2, 3] +//=> {foo: ['1', '2', '3']} ``` -- `index`: Parse arrays with index representation: +- `'index'`: Parse arrays with index representation: ```js queryString.parse('foo[0]=1&foo[1]=2&foo[3]=3', {arrayFormat: 'index'}); -//=> foo: [1, 2, 3] +//=> {foo: ['1', '2', '3']} ``` -- `comma`: Parse arrays with elements separated by comma: +- `'comma'`: Parse arrays with elements separated by comma: ```js queryString.parse('foo=1,2,3', {arrayFormat: 'comma'}); -//=> foo: [1, 2, 3] +//=> {foo: ['1', '2', '3']} ``` -- `none`: Parse arrays with elements using duplicate keys: +- `'none'`: Parse arrays with elements using duplicate keys: ```js queryString.parse('foo=1&foo=2&foo=3'); -//=> foo: [1, 2, 3] +//=> {foo: ['1', '2', '3']} ``` +##### sort + +Type: `Function | boolean`
+Default: `true` + +Supports both `Function` as a custom sorting function or `false` to disable sorting. + +##### parseNumbers + +Type: `boolean`
+Default: `false` + +```js +queryString.parse('foo=1', {parseNumbers: true}); +//=> {foo: 1} +``` + +Parse the value as a number type instead of string type if it's a number. + +##### parseBooleans + +Type: `boolean`
+Default: `false` + +```js +queryString.parse('foo=true', {parseBooleans: true}); +//=> {foo: true} +``` + +Parse the value as a boolean type instead of string type if it's a boolean. + ### .stringify(object, [options]) Stringify an object into a query string and sorting the keys. #### options -Type: `Object` +Type: `object` ##### strict @@ -125,30 +152,30 @@ Default: `true` ##### arrayFormat Type: `string`
-Default: `none` +Default: `'none'` -- `bracket`: Serialize arrays using bracket representation: +- `'bracket'`: Serialize arrays using bracket representation: ```js queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'bracket'}); //=> 'foo[]=1&foo[]=2&foo[]=3' ``` -- `index`: Serialize arrays using index representation: +- `'index'`: Serialize arrays using index representation: ```js queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'index'}); -//=> 'foo[0]=1&foo[1]=2&foo[3]=3' +//=> 'foo[0]=1&foo[1]=2&foo[2]=3' ``` -- `comma`: Serialize arrays by separating elements with comma: +- `'comma'`: Serialize arrays by separating elements with comma: ```js queryString.stringify({foo: [1, 2, 3]}, {arrayFormat: 'comma'}); //=> 'foo=1,2,3' ``` -- `none`: Serialize arrays by using duplicate keys: +- `'none'`: Serialize arrays by using duplicate keys: ```js queryString.stringify({foo: [1, 2, 3]}); @@ -181,7 +208,7 @@ If omitted, keys are sorted using `Array#sort()`, which means, converting them t Extract a query string from a URL that can be passed into `.parse()`. -### .parseUrl(string, [options]) +### .parseUrl(string, options?) Extract the URL and the query string as an object. @@ -218,7 +245,7 @@ queryString.parse('likes=cake&name=bob&likes=icecream'); //=> {likes: ['cake', 'icecream'], name: 'bob'} queryString.stringify({color: ['taupe', 'chartreuse'], id: '515'}); -//=> 'color=chartreuse&color=taupe&id=515' +//=> 'color=taupe&color=chartreuse&id=515' ``` @@ -238,6 +265,14 @@ queryString.stringify({foo: undefined}); ``` -## License +--- -MIT © [Sindre Sorhus](https://sindresorhus.com) +
+Get professional support for this package with a Tidelift subscription
+
+Tidelift helps make open source sustainable for maintainers while giving companies
+assurances about security, maintenance, and licensing for their dependencies.
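One behavioral consequence of the new `removeHash` helper is worth noting: `extract` now strips the fragment before pulling out the query string, matching what `parseUrl` already did, so a trailing `#hash` no longer leaks into the last parameter. A short illustration with a made-up URL:

```js
const queryString = require('query-string');

// extract() previously returned 'foo=bar#anchor' for this input.
queryString.extract('https://foo.bar?foo=bar#anchor');
//=> 'foo=bar'

// parseUrl() now routes through the same removeHash helper.
queryString.parseUrl('https://foo.bar?foo=bar#anchor');
//=> {url: 'https://foo.bar', query: {foo: 'bar'}}
```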
diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json index e2cd943594977..67c7e602f2627 100644 --- a/node_modules/readable-stream/package.json +++ b/node_modules/readable-stream/package.json @@ -1,19 +1,19 @@ { - "_from": "readable-stream@3.3.0", - "_id": "readable-stream@3.3.0", + "_from": "readable-stream@3.4.0", + "_id": "readable-stream@3.4.0", "_inBundle": false, - "_integrity": "sha512-EsI+s3k3XsW+fU8fQACLN59ky34AZ14LoeVZpYwmZvldCFo0r0gnelwF2TcMjLor/BTL5aDJVBMkss0dthToPw==", + "_integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", "_location": "/readable-stream", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "readable-stream@3.3.0", + "raw": "readable-stream@3.4.0", "name": "readable-stream", "escapedName": "readable-stream", - "rawSpec": "3.3.0", + "rawSpec": "3.4.0", "saveSpec": null, - "fetchSpec": "3.3.0" + "fetchSpec": "3.4.0" }, "_requiredBy": [ "#USER", @@ -21,9 +21,9 @@ "/bl", "/tar-stream" ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.3.0.tgz", - "_shasum": "cb8011aad002eb717bf040291feba8569c986fb9", - "_spec": "readable-stream@3.3.0", + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "_shasum": "a51c26754658e0a3c21dbf59163bd45ba6f447fc", + "_spec": "readable-stream@3.4.0", "_where": "/Users/isaacs/dev/npm/cli", "browser": { "util": false, @@ -59,7 +59,7 @@ "nyc": "^11.0.0", "pump": "^3.0.0", "rimraf": "^2.6.2", - "tap": "^11.0.0", + "tap": "^12.0.0", "tape": "^4.9.0", "tar-fs": "^1.16.2", "util-promisify": "^2.1.0" @@ -86,13 +86,13 @@ "url": "git://github.com/nodejs/readable-stream.git" }, "scripts": { - "ci": "TAP=1 tap test/parallel/*.js test/ours/*.js | tee test.tap", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", "cover": "nyc npm test", "report": "nyc report --reporter=lcov", - "test": "tap -j 4 test/parallel/*.js test/ours/*.js", + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", "test-browser-local": "airtap --open --local -- test/browser.js", "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", "update-browser-errors": "babel -o errors-browser.js errors.js" }, - "version": "3.3.0" + "version": "3.4.0" } diff --git a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js index e50372592ee6c..adbf60de832f9 100644 --- a/node_modules/readable-stream/readable-browser.js +++ b/node_modules/readable-stream/readable-browser.js @@ -5,3 +5,5 @@ exports.Writable = require('./lib/_stream_writable.js'); exports.Duplex = require('./lib/_stream_duplex.js'); exports.Transform = require('./lib/_stream_transform.js'); exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/node_modules/split-on-first/index.d.ts b/node_modules/split-on-first/index.d.ts new file mode 100644 index 0000000000000..6123bef9126ab --- /dev/null +++ b/node_modules/split-on-first/index.d.ts @@ -0,0 +1,29 @@ +/** +Split a string on the first occurrence of a given separator. + +@param string - The string to split. +@param separator - The separator to split on. 
+ +@example +``` +import splitOnFirst = require('split-on-first'); + +splitOnFirst('a-b-c', '-'); +//=> ['a', 'b-c'] + +splitOnFirst('key:value:value2', ':'); +//=> ['key', 'value:value2'] + +splitOnFirst('a---b---c', '---'); +//=> ['a', 'b---c'] + +splitOnFirst('a-b-c', '+'); +//=> ['a-b-c'] +``` +*/ +declare function splitOnFirst( + string: string, + separator: string +): [string, string?]; + +export = splitOnFirst; diff --git a/node_modules/split-on-first/index.js b/node_modules/split-on-first/index.js new file mode 100644 index 0000000000000..d9140706dd05b --- /dev/null +++ b/node_modules/split-on-first/index.js @@ -0,0 +1,22 @@ +'use strict'; + +module.exports = (string, separator) => { + if (!(typeof string === 'string' && typeof separator === 'string')) { + throw new TypeError('Expected the arguments to be of type `string`'); + } + + if (separator === '') { + return [string]; + } + + const separatorIndex = string.indexOf(separator); + + if (separatorIndex === -1) { + return [string]; + } + + return [ + string.slice(0, separatorIndex), + string.slice(separatorIndex + separator.length) + ]; +}; diff --git a/node_modules/split-on-first/license b/node_modules/split-on-first/license new file mode 100644 index 0000000000000..e7af2f77107d7 --- /dev/null +++ b/node_modules/split-on-first/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
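A few edge cases fall directly out of the implementation above, and query-string's parser relies on the first one whenever a parameter has no `=` at all (which is how missing values end up as `null`):

```js
const splitOnFirst = require('split-on-first');

// Separator not found: the whole string comes back as the only element.
splitOnFirst('a-b-c', '+');
//=> ['a-b-c']

// An empty separator is a no-op rather than a per-character split.
splitOnFirst('a-b-c', '');
//=> ['a-b-c']

// Non-string arguments are rejected up front.
splitOnFirst(null, '-');
// Throws: TypeError: Expected the arguments to be of type `string`
```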
diff --git a/node_modules/split-on-first/package.json b/node_modules/split-on-first/package.json new file mode 100644 index 0000000000000..d6f46a9016382 --- /dev/null +++ b/node_modules/split-on-first/package.json @@ -0,0 +1,68 @@ +{ + "_from": "split-on-first@^1.0.0", + "_id": "split-on-first@1.1.0", + "_inBundle": false, + "_integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==", + "_location": "/split-on-first", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "split-on-first@^1.0.0", + "name": "split-on-first", + "escapedName": "split-on-first", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/query-string" + ], + "_resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", + "_shasum": "f610afeee3b12bce1d0c30425e76398b78249a5f", + "_spec": "split-on-first@^1.0.0", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/query-string", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/split-on-first/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Split a string on the first occurance of a given separator", + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + }, + "engines": { + "node": ">=6" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/sindresorhus/split-on-first#readme", + "keywords": [ + "split", + "string", + "first", + "occurrence", + "separator", + "delimiter", + "text" + ], + "license": "MIT", + "name": "split-on-first", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/split-on-first.git" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "version": "1.1.0" +} diff --git a/node_modules/split-on-first/readme.md b/node_modules/split-on-first/readme.md new file mode 100644 index 0000000000000..2463cf1cce467 --- /dev/null +++ b/node_modules/split-on-first/readme.md @@ -0,0 +1,58 @@ +# split-on-first [![Build Status](https://travis-ci.com/sindresorhus/split-on-first.svg?branch=master)](https://travis-ci.com/sindresorhus/split-on-first) + +> Split a string on the first occurrence of a given separator + +This is similar to [`String#split()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split), but that one splits on all the occurrences, not just the first one. + + +## Install + +``` +$ npm install split-on-first +``` + + +## Usage + +```js +const splitOnFirst = require('split-on-first'); + +splitOnFirst('a-b-c', '-'); +//=> ['a', 'b-c'] + +splitOnFirst('key:value:value2', ':'); +//=> ['key', 'value:value2'] + +splitOnFirst('a---b---c', '---'); +//=> ['a', 'b---c'] + +splitOnFirst('a-b-c', '+'); +//=> ['a-b-c'] +``` + + +## API + +### splitOnFirst(string, separator) + +#### string + +Type: `string` + +The string to split. + +#### separator + +Type: `string` + +The separator to split on. 
+ + +## Related + +- [split-at](https://github.com/sindresorhus/split-at) - Split a string at one or more indices + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/tar-stream/headers.js b/node_modules/tar-stream/headers.js index 8571c8739d791..fe7fd36735c8d 100644 --- a/node_modules/tar-stream/headers.js +++ b/node_modules/tar-stream/headers.js @@ -3,8 +3,13 @@ var alloc = Buffer.alloc var ZEROS = '0000000000000000000' var SEVENS = '7777777777777777777' var ZERO_OFFSET = '0'.charCodeAt(0) -var USTAR = 'ustar\x0000' +var USTAR_MAGIC = Buffer.from('ustar\x00', 'binary') +var USTAR_VER = Buffer.from('00', 'binary') +var GNU_MAGIC = Buffer.from('ustar\x20', 'binary') +var GNU_VER = Buffer.from('\x20\x00', 'binary') var MASK = parseInt('7777', 8) +var MAGIC_OFFSET = 257 +var VERSION_OFFSET = 263 var clamp = function (index, len, defaultValue) { if (typeof index !== 'number') return defaultValue @@ -223,7 +228,8 @@ exports.encode = function (opts) { if (opts.linkname) buf.write(opts.linkname, 157) - buf.write(USTAR, 257) + USTAR_MAGIC.copy(buf, MAGIC_OFFSET) + USTAR_VER.copy(buf, VERSION_OFFSET) if (opts.uname) buf.write(opts.uname, 265) if (opts.gname) buf.write(opts.gname, 297) buf.write(encodeOct(opts.devmajor || 0, 6), 329) @@ -252,11 +258,6 @@ exports.decode = function (buf, filenameEncoding) { var devmajor = decodeOct(buf, 329, 8) var devminor = decodeOct(buf, 337, 8) - if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name - - // to support old tar versions that use trailing / to indicate dirs - if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5 - var c = cksum(buf) // checksum is still initial value if header was null. @@ -265,6 +266,21 @@ exports.decode = function (buf, filenameEncoding) { // valid checksum if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?') + if (USTAR_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0) { + // ustar (posix) format. + // prepend prefix, if present. + if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name + } else if (GNU_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0 && + GNU_VER.compare(buf, VERSION_OFFSET, VERSION_OFFSET + 2) === 0) { + // 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and + // multi-volume tarballs. 
+ } else { + throw new Error('Invalid tar header: unknown format.') + } + + // to support old tar versions that use trailing / to indicate dirs + if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5 + return { name: name, mode: mode, diff --git a/node_modules/tar-stream/package.json b/node_modules/tar-stream/package.json index 9ec13dca1f28a..9acb919dca310 100644 --- a/node_modules/tar-stream/package.json +++ b/node_modules/tar-stream/package.json @@ -1,28 +1,28 @@ { - "_from": "tar-stream@2.0.1", - "_id": "tar-stream@2.0.1", + "_from": "tar-stream@2.1.0", + "_id": "tar-stream@2.1.0", "_inBundle": false, - "_integrity": "sha512-I6OJF7wE62BC6zNPdHDtseK0D0187PBjbKSLYY4ffvVkBM6tyBn2O9plDvVM2229/mozfEL/X3++qSvYYQE2xw==", + "_integrity": "sha512-+DAn4Nb4+gz6WZigRzKEZl1QuJVOLtAwwF+WUxy1fJ6X63CaGaUAxJRD2KEn1OMfcbCjySTYpNC6WmfQoIEOdw==", "_location": "/tar-stream", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "tar-stream@2.0.1", + "raw": "tar-stream@2.1.0", "name": "tar-stream", "escapedName": "tar-stream", - "rawSpec": "2.0.1", + "rawSpec": "2.1.0", "saveSpec": null, - "fetchSpec": "2.0.1" + "fetchSpec": "2.1.0" }, "_requiredBy": [ "#DEV:/", "#USER" ], - "_resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.0.1.tgz", - "_shasum": "42fbe41cd1cc5e6657c813e7d98e7afca2858a8c", - "_spec": "tar-stream@2.0.1", - "_where": "/Users/aeschright/code/cli", + "_resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.1.0.tgz", + "_shasum": "d1aaa3661f05b38b5acc9b7020efdca5179a2cc3", + "_spec": "tar-stream@2.1.0", + "_where": "/Users/isaacs/dev/npm/cli", "author": { "name": "Mathias Buus", "email": "mathiasbuus@gmail.com" @@ -80,5 +80,5 @@ "test": "standard && tape test/extract.js test/pack.js", "test-all": "standard && tape test/*.js" }, - "version": "2.0.1" + "version": "2.1.0" } diff --git a/node_modules/worker-farm/.travis.yml b/node_modules/worker-farm/.travis.yml index 1c9e2b559d5da..7af56429c010b 100644 --- a/node_modules/worker-farm/.travis.yml +++ b/node_modules/worker-farm/.travis.yml @@ -1,9 +1,9 @@ language: node_js node_js: - - 4 - 6 - 8 - - 9 + - 10 + - 12 branches: only: - master diff --git a/node_modules/worker-farm/README.md b/node_modules/worker-farm/README.md index 982b37cb5dfb9..c52689ed9fc58 100644 --- a/node_modules/worker-farm/README.md +++ b/node_modules/worker-farm/README.md @@ -1,4 +1,4 @@ -# Worker Farm [![Build Status](https://secure.travis-ci.org/rvagg/node-worker-farm.png)](http://travis-ci.org/rvagg/node-worker-farm) +# Worker Farm [![Build Status](https://secure.travis-ci.org/rvagg/node-worker-farm.svg)](http://travis-ci.org/rvagg/node-worker-farm) [![NPM](https://nodei.co/npm/worker-farm.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/worker-farm/) [![NPM](https://nodei.co/npm-dl/worker-farm.png?months=6&height=3)](https://nodei.co/npm/worker-farm/) @@ -111,6 +111,7 @@ If you don't provide an `options` object then the following defaults will be use , maxCallTime : Infinity , maxRetries : Infinity , autoStart : false + , onChild : function() {} } ``` @@ -130,6 +131,8 @@ If you don't provide an `options` object then the following defaults will be use * **autoStart** when set to `true` will start the workers as early as possible. Use this when your workers have to do expensive initialization. That way they'll be ready when the first request comes through. + * **onChild** when new child process starts this callback will be called with subprocess object as an argument. 
Use this when you need to add some custom communication with child processes. + ### workerFarm.end(farm) Child processes stay alive waiting for jobs indefinitely and your farm manager will stay alive managing its workers, so if you need it to stop then you have to do so explicitly. If you send your farm API to `workerFarm.end()` then it'll cleanly end your worker processes. Note though that it's a *soft* ending so it'll wait for child processes to finish what they are working on before asking them to die. diff --git a/node_modules/worker-farm/index.d.ts b/node_modules/worker-farm/index.d.ts index 682c21f410c51..310e91503fda4 100644 --- a/node_modules/worker-farm/index.d.ts +++ b/node_modules/worker-farm/index.d.ts @@ -1,17 +1,15 @@ -interface Workers { - (callback: WorkerCallback): void; - (arg1: any, callback: WorkerCallback): void; - (arg1: any, arg2: any, callback: WorkerCallback): void; - (arg1: any, arg2: any, arg3: any, callback: WorkerCallback): void; - (arg1: any, arg2: any, arg3: any, arg4: any, callback: WorkerCallback): void; -} +import { ForkOptions } from "child_process"; + +export = Farm; -type WorkerCallback = - | WorkerCallback0 - | WorkerCallback1 - | WorkerCallback2 - | WorkerCallback3 - | WorkerCallback4; +declare function Farm(name: string): Farm.Workers; +declare function Farm(name: string, exportedMethods: string[]): Farm.Workers; +declare function Farm(options: Farm.FarmOptions, name: string): Farm.Workers; +declare function Farm( + options: Farm.FarmOptions, + name: string, + exportedMethods: string[], +): Farm.Workers; type WorkerCallback0 = () => void; type WorkerCallback1 = (arg1: any) => void; @@ -19,26 +17,39 @@ type WorkerCallback2 = (arg1: any, arg2: any) => void; type WorkerCallback3 = (arg1: any, arg2: any, arg3: any) => void; type WorkerCallback4 = (arg1: any, arg2: any, arg3: any, arg4: any) => void; -interface FarmOptions { - maxCallsPerWorker?: number - maxConcurrentWorkers?: number - maxConcurrentCallsPerWorker?: number - maxConcurrentCalls?: number - maxCallTime?: number - maxRetries?: number - autoStart?: boolean -} +declare namespace Farm { + export function end(workers: Workers, callback?: Function): void; -interface WorkerFarm { - (name: string): Workers; - (name: string, exportedMethods: string[]): Workers; - (options: FarmOptions, name: string): Workers; - (options: FarmOptions, name: string, exportedMethods: string[]): Workers; + export interface Workers { + [x: string]: Workers, + (callback: WorkerCallback): void; + (arg1: any, callback: WorkerCallback): void; + (arg1: any, arg2: any, callback: WorkerCallback): void; + (arg1: any, arg2: any, arg3: any, callback: WorkerCallback): void; + ( + arg1: any, + arg2: any, + arg3: any, + arg4: any, + callback: WorkerCallback, + ): void; + } - end: (workers: Workers) => void; -} + export interface FarmOptions { + maxCallsPerWorker?: number; + maxConcurrentWorkers?: number; + maxConcurrentCallsPerWorker?: number; + maxConcurrentCalls?: number; + maxCallTime?: number; + maxRetries?: number; + autoStart?: boolean; + workerOptions?: ForkOptions; + } -declare module "worker-farm" { - const workerFarm: WorkerFarm; - export = workerFarm; + export type WorkerCallback = + | WorkerCallback0 + | WorkerCallback1 + | WorkerCallback2 + | WorkerCallback3 + | WorkerCallback4; } diff --git a/node_modules/worker-farm/lib/child/index.js b/node_modules/worker-farm/lib/child/index.js index f91e08433ab1f..78f63371392af 100644 --- a/node_modules/worker-farm/lib/child/index.js +++ b/node_modules/worker-farm/lib/child/index.js 
@@ -29,7 +29,7 @@ function handle (data) { _args[0][key] = e[key] }) } - process.send({ idx: idx, child: child, args: _args }) + process.send({ owner: 'farm', idx: idx, child: child, args: _args }) } , exec @@ -46,7 +46,11 @@ function handle (data) { process.on('message', function (data) { + if (data.owner !== 'farm') { + return; + } + if (!$module) return $module = require(data.module) - if (data == 'die') return process.exit(0) + if (data.event == 'die') return process.exit(0) handle(data) }) diff --git a/node_modules/worker-farm/lib/farm.js b/node_modules/worker-farm/lib/farm.js index ef0ab0e1052c7..60720dc561f6c 100644 --- a/node_modules/worker-farm/lib/farm.js +++ b/node_modules/worker-farm/lib/farm.js @@ -10,6 +10,7 @@ const DEFAULT_OPTIONS = { , maxRetries : Infinity , forcedKillTime : 100 , autoStart : false + , onChild : function() {} } const fork = require('./fork') @@ -29,8 +30,8 @@ function Farm (options, path) { Farm.prototype.mkhandle = function (method) { return function () { let args = Array.prototype.slice.call(arguments) - if (this.activeCalls >= this.options.maxConcurrentCalls) { - let err = new MaxConcurrentCallsError('Too many concurrent calls (' + this.activeCalls + ')') + if (this.activeCalls + this.callQueue.length >= this.options.maxConcurrentCalls) { + let err = new MaxConcurrentCallsError('Too many concurrent calls (active: ' + this.activeCalls + ', queued: ' + this.callQueue.length + ')') if (typeof args[args.length - 1] == 'function') return process.nextTick(args[args.length - 1].bind(null, err)) throw err @@ -113,7 +114,14 @@ Farm.prototype.startChild = function () { , exitCode : null } - forked.child.on('message', this.receive.bind(this)) + this.options.onChild(forked.child); + + forked.child.on('message', function(data) { + if (data.owner !== 'farm') { + return; + } + this.receive(data); + }.bind(this)) forked.child.once('exit', function (code) { c.exitCode = code this.onExit(id) @@ -128,7 +136,7 @@ Farm.prototype.startChild = function () { Farm.prototype.stopChild = function (childId) { let child = this.children[childId] if (child) { - child.send('die') + child.send({owner: 'farm', event: 'die'}) setTimeout(function () { if (child.exitCode === null) child.child.kill('SIGKILL') @@ -234,7 +242,8 @@ Farm.prototype.send = function (childId, call) { this.activeCalls++ child.send({ - idx : idx + owner : 'farm' + , idx : idx , child : childId , method : call.method , args : call.args diff --git a/node_modules/worker-farm/lib/fork.js b/node_modules/worker-farm/lib/fork.js index 2843df48474c5..5a035d9749e88 100644 --- a/node_modules/worker-farm/lib/fork.js +++ b/node_modules/worker-farm/lib/fork.js @@ -20,7 +20,7 @@ function fork (forkModule, workerOptions) { // this *should* be picked up by onExit and the operation requeued }) - child.send({ module: forkModule }) + child.send({ owner: 'farm', module: forkModule }) // return a send() function for this child return { diff --git a/node_modules/worker-farm/lib/index.js b/node_modules/worker-farm/lib/index.js index fe574e59b52f7..8c142227355a7 100644 --- a/node_modules/worker-farm/lib/index.js +++ b/node_modules/worker-farm/lib/index.js @@ -26,7 +26,7 @@ function end (api, callback) { for (let i = 0; i < farms.length; i++) if (farms[i] && farms[i].api === api) return farms[i].farm.end(callback) - process.nextTick(callback.bind(null, 'Worker farm not found!')) + process.nextTick(callback.bind(null, new Error('Worker farm not found!'))) } diff --git a/node_modules/worker-farm/package.json 
b/node_modules/worker-farm/package.json index ba43c44d4bea4..3d6181ae95783 100644 --- a/node_modules/worker-farm/package.json +++ b/node_modules/worker-farm/package.json @@ -1,45 +1,43 @@ { - "_args": [ - [ - "worker-farm@1.6.0", - "/Users/rebecca/code/npm" - ] - ], - "_from": "worker-farm@1.6.0", - "_id": "worker-farm@1.6.0", + "_from": "worker-farm@1.7.0", + "_id": "worker-farm@1.7.0", "_inBundle": false, - "_integrity": "sha512-6w+3tHbM87WnSWnENBUvA2pxJPLhQUg5LKwUQHq3r+XPhIM+Gh2R5ycbwPCyuGbNg+lPgdcnQUhuC02kJCvffQ==", + "_integrity": "sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==", "_location": "/worker-farm", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, - "raw": "worker-farm@1.6.0", + "raw": "worker-farm@1.7.0", "name": "worker-farm", "escapedName": "worker-farm", - "rawSpec": "1.6.0", + "rawSpec": "1.7.0", "saveSpec": null, - "fetchSpec": "1.6.0" + "fetchSpec": "1.7.0" }, "_requiredBy": [ + "#USER", "/", "/libcipm" ], - "_resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.6.0.tgz", - "_spec": "1.6.0", - "_where": "/Users/rebecca/code/npm", + "_resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.7.0.tgz", + "_shasum": "26a94c5391bbca926152002f69b84a4bf772e5a8", + "_spec": "worker-farm@1.7.0", + "_where": "/Users/isaacs/dev/npm/cli", "authors": [ "Rod Vagg @rvagg (https://github.com/rvagg)" ], "bugs": { "url": "https://github.com/rvagg/node-worker-farm/issues" }, + "bundleDependencies": false, "dependencies": { "errno": "~0.1.7" }, + "deprecated": false, "description": "Distribute processing tasks to child processes with an über-simple API and baked-in durability & custom concurrency options.", "devDependencies": { - "tape": "~4.9.0" + "tape": "~4.10.1" }, "homepage": "https://github.com/rvagg/node-worker-farm", "keywords": [ @@ -59,5 +57,5 @@ "test": "node ./tests/" }, "types": "./index.d.ts", - "version": "1.6.0" + "version": "1.7.0" } diff --git a/node_modules/worker-farm/tests/index.js b/node_modules/worker-farm/tests/index.js index ec9deabedda6c..e6be7bb7aabd6 100644 --- a/node_modules/worker-farm/tests/index.js +++ b/node_modules/worker-farm/tests/index.js @@ -51,6 +51,22 @@ tape('simple, exports.fn test', function (t) { }) +tape('on child', function (t) { + t.plan(2) + + let child = workerFarm({ onChild: function(subprocess) { childPid = subprocess.pid } }, childPath) + , childPid = null; + + child(0, function(err, pid) { + t.equal(childPid, pid) + }) + + workerFarm.end(child, function () { + t.ok(true, 'workerFarm ended') + }) +}) + + // use the returned pids to check that we're using a single child process // when maxConcurrentWorkers = 1 tape('single worker', function (t) { @@ -303,10 +319,12 @@ tape('multiple concurrent calls', function (t) { child(defer, function () { if (++cbc == count) { let time = Date.now() - start - // (defer * (count / callsPerWorker + 1)) - if precise it'd be count/callsPerWorker + let min = defer * 1.5 + // (defer * (count / callsPerWorker + 2)) - if precise it'd be count/callsPerWorker // but accounting for IPC and other overhead, we need to give it a bit of extra time, - // hence the +1 - t.ok(time > (defer * 1.5) && time < (defer * (count / callsPerWorker + 1)), 'processed tasks concurrently (' + time + 'ms)') + // hence the +2 + let max = defer * (count / callsPerWorker + 2) + t.ok(time > min && time < max, 'processed tasks concurrently (' + time + ' > ' + min + ' && ' + time + ' < ' + max + ')') workerFarm.end(child, function () { 
t.ok(true, 'workerFarm ended') }) @@ -474,6 +492,40 @@ tape('test maxConcurrentCalls', function (t) { }) +tape('test maxConcurrentCalls + queue', function (t) { + t.plan(13) + + let child = workerFarm({ maxConcurrentCalls: 4, maxConcurrentWorkers: 2, maxConcurrentCallsPerWorker: 1 }, childPath) + + child(20, function (err) { console.log('ended short1'); t.notOk(err, 'no error, short call 1') }) + child(20, function (err) { console.log('ended short2'); t.notOk(err, 'no error, short call 2') }) + child(300, function (err) { t.notOk(err, 'no error, long call 1') }) + child(300, function (err) { t.notOk(err, 'no error, long call 2') }) + child(20, function (err) { + t.ok(err, 'short call 3 should error') + t.equal(err.type, 'MaxConcurrentCallsError', 'correct error type') + }) + child(20, function (err) { + t.ok(err, 'short call 4 should error') + t.equal(err.type, 'MaxConcurrentCallsError', 'correct error type') + }) + + // cross fingers and hope the two short jobs have ended + setTimeout(function () { + child(20, function (err) { t.notOk(err, 'no error, delayed short call 1') }) + child(20, function (err) { t.notOk(err, 'no error, delayed short call 2') }) + child(20, function (err) { + t.ok(err, 'delayed short call 3 should error') + t.equal(err.type, 'MaxConcurrentCallsError', 'correct error type') + }) + + workerFarm.end(child, function () { + t.ok(true, 'workerFarm ended') + }) + }, 250) +}) + + // this test should not keep the process running! if the test process // doesn't die then the problem is here tape('test timeout kill', function (t) { @@ -529,12 +581,12 @@ tape('custom arguments can be passed to "fork"', function (t) { let cwd = fs.realpathSync(os.tmpdir()) , workerOptions = { cwd : cwd - , execArgv : ['--no-warnings'] + , execArgv : ['--expose-gc'] } , child = workerFarm({ maxConcurrentWorkers: 1, maxRetries: 5, workerOptions: workerOptions}, childPath, ['args']) child.args(function (err, result) { - t.equal(result.execArgv[0], '--no-warnings', 'flags passed (overridden default)') + t.equal(result.execArgv[0], '--expose-gc', 'flags passed (overridden default)') t.equal(result.cwd, cwd, 'correct cwd folder') }) diff --git a/node_modules/yallist/iterator.js b/node_modules/yallist/iterator.js index 4a15bf22c4003..d41c97a19f984 100644 --- a/node_modules/yallist/iterator.js +++ b/node_modules/yallist/iterator.js @@ -1,7 +1,8 @@ -var Yallist = require('./yallist.js') - -Yallist.prototype[Symbol.iterator] = function* () { - for (let walker = this.head; walker; walker = walker.next) { - yield walker.value +'use strict' +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } } } diff --git a/node_modules/yallist/package.json b/node_modules/yallist/package.json index f56d6b6a33497..a478663faf070 100644 --- a/node_modules/yallist/package.json +++ b/node_modules/yallist/package.json @@ -1,27 +1,27 @@ { - "_from": "yallist@^2.1.2", - "_id": "yallist@2.1.2", + "_from": "yallist@^3.0.2", + "_id": "yallist@3.0.3", "_inBundle": false, - "_integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "_integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", "_location": "/yallist", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, - "raw": "yallist@^2.1.2", + "raw": "yallist@^3.0.2", "name": "yallist", "escapedName": "yallist", - "rawSpec": "^2.1.2", + "rawSpec": "^3.0.2", "saveSpec": null, - "fetchSpec": 
"^2.1.2" + "fetchSpec": "^3.0.2" }, "_requiredBy": [ "/lru-cache" ], - "_resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "_shasum": "1c11f9218f076089a47dd512f93c6699a6a81d52", - "_spec": "yallist@^2.1.2", - "_where": "/Users/rebecca/code/npm/node_modules/lru-cache", + "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", + "_shasum": "b4b049e314be545e3ce802236d6cd22cd91c3de9", + "_spec": "yallist@^3.0.2", + "_where": "/Users/isaacs/dev/npm/cli/node_modules/lru-cache", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -35,7 +35,7 @@ "deprecated": false, "description": "Yet Another Linked List", "devDependencies": { - "tap": "^10.3.0" + "tap": "^12.1.0" }, "directories": { "test": "test" @@ -58,5 +58,5 @@ "preversion": "npm test", "test": "tap test/*.js --100" }, - "version": "2.1.2" + "version": "3.0.3" } diff --git a/node_modules/yallist/yallist.js b/node_modules/yallist/yallist.js index 518d23330b936..b0ab36cf31b7a 100644 --- a/node_modules/yallist/yallist.js +++ b/node_modules/yallist/yallist.js @@ -1,3 +1,4 @@ +'use strict' module.exports = Yallist Yallist.Node = Node @@ -368,3 +369,8 @@ function Node (value, prev, next, list) { this.next = null } } + +try { + // add if support for Symbol.iterator is present + require('./iterator.js')(Yallist) +} catch (er) {} diff --git a/package-lock.json b/package-lock.json index a0876166f9c28..6b32f59e9fab9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -485,14 +485,6 @@ "readable-stream": "^3.0.1" } }, - "block-stream": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz", - "integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=", - "requires": { - "inherits": "~2.0.0" - } - }, "bluebird": { "version": "3.5.5", "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.5.tgz", @@ -592,19 +584,6 @@ "once": "^1.3.0", "path-is-absolute": "^1.0.0" } - }, - "lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "requires": { - "yallist": "^3.0.2" - } - }, - "yallist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" } } }, @@ -639,9 +618,9 @@ } }, "call-limit": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.0.tgz", - "integrity": "sha1-b9YbA/PaQqLNDsK2DwK9DnGZH+o=" + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/call-limit/-/call-limit-1.1.1.tgz", + "integrity": "sha512-5twvci5b9eRBw2wCfPtN0GmlR2/gadZqyFpPhOK6CvMFoFgA+USnZ6Jpu1lhG9h85pQ3Ouil3PfXWRD4EUaRiQ==" }, "caller": { "version": "1.0.1", @@ -1130,6 +1109,22 @@ "lru-cache": "^4.0.1", "shebang-command": "^1.2.0", "which": "^1.2.9" + }, + "dependencies": { + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + } } }, "crypto-random-string": { @@ -2040,7 +2035,25 @@ "requires": { "lru-cache": 
"^4.0.1", "which": "^1.2.9" + }, + "dependencies": { + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + } } + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "dev": true } } }, @@ -2176,17 +2189,6 @@ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, - "fstream": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", - "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", - "requires": { - "graceful-fs": "^4.1.2", - "inherits": "~2.0.0", - "mkdirp": ">=0.5 0", - "rimraf": "2" - } - }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -2296,9 +2298,9 @@ } }, "glob": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.4.tgz", + "integrity": "sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -2578,9 +2580,9 @@ } }, "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ini": { "version": "1.3.5", @@ -3215,9 +3217,9 @@ } }, "libnpmsearch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-2.0.0.tgz", - "integrity": "sha512-vd+JWbTGzOSfiOc+72MU6y7WqmBXn49egCCrIXp27iE/88bX8EpG64ST1blWQI1bSMUr9l1AKPMVsqa2tS5KWA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/libnpmsearch/-/libnpmsearch-2.0.1.tgz", + "integrity": "sha512-K0yXyut9MHHCAH+DOiglQCpmBKPZXSUu76+BE2maSEfQN15OwNaA/Aiioe9lRFlVFOr7WcuJCY+VSl+gLi9NTA==", "requires": { "figgy-pudding": "^3.5.1", "get-stream": "^4.0.0", @@ -3438,12 +3440,11 @@ "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" }, "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "yallist": "^3.0.2" } }, "make-dir": { @@ -3461,16 +3462,16 @@ "dev": true }, "make-fetch-happen": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.1.tgz", - "integrity": 
"sha512-7R5ivfy9ilRJ1EMKIOziwrns9fGeAD4bAha8EB7BIiBBLHm2KeTUGCrICFt2rbHfzheTLynv50GnNTK1zDTrcQ==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.2.tgz", + "integrity": "sha512-YMJrAjHSb/BordlsDEcVcPyTbiJKkzqMf48N8dAJZT9Zjctrkb6Yg4TY9Sq2AwSIQJFn5qBBKVTYt3vP5FMIHA==", "requires": { "agentkeepalive": "^3.4.1", - "cacache": "^11.0.1", + "cacache": "^11.3.3", "http-cache-semantics": "^3.8.1", "http-proxy-agent": "^2.1.0", "https-proxy-agent": "^2.2.1", - "lru-cache": "^4.1.2", + "lru-cache": "^5.1.1", "mississippi": "^3.0.0", "node-fetch-npm": "^2.0.2", "promise-retry": "^1.1.1", @@ -3488,27 +3489,16 @@ } }, "marked": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/marked/-/marked-0.6.0.tgz", - "integrity": "sha512-HduzIW2xApSXKXJSpCipSxKyvMbwRRa/TwMbepmlZziKdH8548WSoDP4SxzulEKjlo8BE39l+2fwJZuRKOln6g==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/marked/-/marked-0.6.3.tgz", + "integrity": "sha512-Fqa7eq+UaxfMriqzYLayfqAE40WN03jf+zHjT18/uXNuzjq3TY0XTbrAoPeqSJrAmPz11VuUA+kBPYOhHt9oOQ==", "dev": true }, "marked-man": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/marked-man/-/marked-man-0.2.1.tgz", - "integrity": "sha1-8lknFIHeO1ByY0ifUiG3xaz9I4M=", - "dev": true, - "requires": { - "marked": "^0.3.2" - }, - "dependencies": { - "marked": { - "version": "0.3.19", - "resolved": "https://registry.npmjs.org/marked/-/marked-0.3.19.tgz", - "integrity": "sha512-ea2eGWOqNxPcXv8dyERdSr/6FmzvWwzjMxpfGB/sbMccXoct+xY+YukPD+QTUZwyvK7BZwcr4m21WBOW41pAkg==", - "dev": true - } - } + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/marked-man/-/marked-man-0.6.0.tgz", + "integrity": "sha512-lqzGe2uPo0KPO5J5yyfZ6PbEpZS6VIMHIzltXNzdwTNBysD0pNLtTaGrtT5R4aFT8UaVR3Fuz9rN3SEFYnea5Q==", + "dev": true }, "meant": { "version": "1.0.1", @@ -3709,11 +3699,10 @@ } }, "node-gyp": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-3.8.0.tgz", - "integrity": "sha512-3g8lYefrRRzvGeSowdJKAKyks8oUpLEd/DyPV4eMhVlhJ0aNaZqIrNUIPuEWWTAoPqyFkfGrM67MC69baqn6vA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-4.0.0.tgz", + "integrity": "sha512-2XiryJ8sICNo6ej8d0idXDEMKfVfFK7kekGCtJAuelGsYHQxhj13KTf95swTCN2dZ/4lTfZ84Fu31jqJEEgjWA==", "requires": { - "fstream": "^1.0.0", "glob": "^7.0.3", "graceful-fs": "^4.1.2", "mkdirp": "^0.5.0", @@ -3723,7 +3712,7 @@ "request": "^2.87.0", "rimraf": "2", "semver": "~5.3.0", - "tar": "^2.0.0", + "tar": "^4.4.8", "which": "1" }, "dependencies": { @@ -3739,16 +3728,6 @@ "version": "5.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", "integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=" - }, - "tar": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/tar/-/tar-2.2.2.tgz", - "integrity": "sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA==", - "requires": { - "block-stream": "*", - "fstream": "^1.0.12", - "inherits": "2" - } } } }, @@ -3816,13 +3795,13 @@ "dev": true }, "npm-lifecycle": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-2.1.0.tgz", - "integrity": "sha512-QbBfLlGBKsktwBZLj6AviHC6Q9Y3R/AY4a2PYSIRhSKSS0/CxRyD/PfxEX6tPeOCXQgMSNdwGeECacstgptc+g==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/npm-lifecycle/-/npm-lifecycle-2.1.1.tgz", + "integrity": "sha512-+Vg6I60Z75V/09pdcH5iUo/99Q/vop35PaI99elvxk56azSVVsdsSsS/sXqKDNwbRRNN1qSxkcO45ZOu0yOWew==", "requires": { 
"byline": "^5.0.0", - "graceful-fs": "^4.1.11", - "node-gyp": "^3.8.0", + "graceful-fs": "^4.1.15", + "node-gyp": "^4.0.0", "resolve-from": "^4.0.0", "slide": "^1.1.6", "uid-number": "0.0.6", @@ -3895,16 +3874,36 @@ } }, "npm-registry-fetch": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-3.9.0.tgz", - "integrity": "sha512-srwmt8YhNajAoSAaDWndmZgx89lJwIZ1GWxOuckH4Coek4uHv5S+o/l9FLQe/awA+JwTnj4FJHldxhlXdZEBmw==", + "version": "3.9.1", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-3.9.1.tgz", + "integrity": "sha512-VQCEZlydXw4AwLROAXWUR7QDfe2Y8Id/vpAgp6TI1/H78a4SiQ1kQrKZALm5/zxM5n4HIi+aYb+idUAV/RuY0Q==", "requires": { "JSONStream": "^1.3.4", "bluebird": "^3.5.1", "figgy-pudding": "^3.4.1", - "lru-cache": "^4.1.3", - "make-fetch-happen": "^4.0.1", + "lru-cache": "^5.1.1", + "make-fetch-happen": "^4.0.2", "npm-package-arg": "^6.1.0" + }, + "dependencies": { + "make-fetch-happen": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-4.0.2.tgz", + "integrity": "sha512-YMJrAjHSb/BordlsDEcVcPyTbiJKkzqMf48N8dAJZT9Zjctrkb6Yg4TY9Sq2AwSIQJFn5qBBKVTYt3vP5FMIHA==", + "requires": { + "agentkeepalive": "^3.4.1", + "cacache": "^11.3.3", + "http-cache-semantics": "^3.8.1", + "http-proxy-agent": "^2.1.0", + "https-proxy-agent": "^2.2.1", + "lru-cache": "^5.1.1", + "mississippi": "^3.0.0", + "node-fetch-npm": "^2.0.2", + "promise-retry": "^1.1.1", + "socks-proxy-agent": "^4.0.0", + "ssri": "^6.0.0" + } + } } }, "npm-registry-mock": { @@ -4454,14 +4453,6 @@ "which": "^1.3.1" }, "dependencies": { - "lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "requires": { - "yallist": "^3.0.2" - } - }, "minipass": { "version": "2.3.5", "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz", @@ -4470,11 +4461,6 @@ "safe-buffer": "^5.1.2", "yallist": "^3.0.0" } - }, - "yallist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" } } }, @@ -4816,11 +4802,12 @@ "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" }, "query-string": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-6.4.0.tgz", - "integrity": "sha512-Werid2I41/tJTqOGPJ3cC3vwrIh/8ZupBQbp7BSsqXzr+pTin3aMJ/EZb8UEuk7ZO3VqQFvq2qck/ihc6wqIdw==", + "version": "6.8.1", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-6.8.1.tgz", + "integrity": "sha512-g6y0Lbq10a5pPQpjlFuojfMfV1Pd2Jw9h75ypiYPPia3Gcq2rgkKiIwbkS6JxH7c5f5u/B/sB+d13PU+g1eu4Q==", "requires": { "decode-uri-component": "^0.2.0", + "split-on-first": "^1.0.0", "strict-uri-encode": "^2.0.0" } }, @@ -4927,9 +4914,9 @@ } }, "readable-stream": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.3.0.tgz", - "integrity": "sha512-EsI+s3k3XsW+fU8fQACLN59ky34AZ14LoeVZpYwmZvldCFo0r0gnelwF2TcMjLor/BTL5aDJVBMkss0dthToPw==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", "requires": { 
"inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -5383,6 +5370,11 @@ "spdx-ranges": "^2.0.0" } }, + "split-on-first": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", + "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==" + }, "sprintf-js": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.2.tgz", @@ -5909,9 +5901,9 @@ } }, "tar-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.0.1.tgz", - "integrity": "sha512-I6OJF7wE62BC6zNPdHDtseK0D0187PBjbKSLYY4ffvVkBM6tyBn2O9plDvVM2229/mozfEL/X3++qSvYYQE2xw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.1.0.tgz", + "integrity": "sha512-+DAn4Nb4+gz6WZigRzKEZl1QuJVOLtAwwF+WUxy1fJ6X63CaGaUAxJRD2KEn1OMfcbCjySTYpNC6WmfQoIEOdw==", "dev": true, "requires": { "bl": "^3.0.0", @@ -6441,9 +6433,9 @@ "dev": true }, "worker-farm": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.6.0.tgz", - "integrity": "sha512-6w+3tHbM87WnSWnENBUvA2pxJPLhQUg5LKwUQHq3r+XPhIM+Gh2R5ycbwPCyuGbNg+lPgdcnQUhuC02kJCvffQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.7.0.tgz", + "integrity": "sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==", "requires": { "errno": "~0.1.7" } @@ -6509,9 +6501,9 @@ "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" }, "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", + "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" }, "yapool": { "version": "1.0.0", diff --git a/package.json b/package.json index 80143f7c53cb6..eb1fc07f8d8b0 100644 --- a/package.json +++ b/package.json @@ -43,7 +43,7 @@ "bluebird": "^3.5.5", "byte-size": "^5.0.1", "cacache": "^11.3.3", - "call-limit": "~1.1.0", + "call-limit": "^1.1.1", "chownr": "^1.1.1", "ci-info": "^2.0.0", "cli-columns": "^3.1.2", @@ -60,13 +60,13 @@ "fs-vacuum": "~1.2.10", "fs-write-stream-atomic": "~1.0.10", "gentle-fs": "^2.0.1", - "glob": "^7.1.3", + "glob": "^7.1.4", "graceful-fs": "^4.2.0", "has-unicode": "~2.0.1", "hosted-git-info": "^2.7.1", "iferr": "^1.0.2", "inflight": "~1.0.6", - "inherits": "~2.0.3", + "inherits": "^2.0.4", "ini": "^1.3.5", "init-package-json": "^1.10.3", "is-cidr": "^3.0.0", @@ -75,6 +75,7 @@ "libcipm": "^3.0.3", "libnpm": "^2.0.1", "libnpmhook": "^5.0.2", + "libnpmsearch": "^2.0.1", "libnpx": "^10.2.0", "lock-verify": "^2.1.0", "lockfile": "^1.0.4", @@ -83,22 +84,22 @@ "lodash.union": "~4.6.0", "lodash.uniq": "~4.5.0", "lodash.without": "~4.4.0", - "lru-cache": "^4.1.5", + "lru-cache": "^5.1.1", "meant": "~1.0.1", "mississippi": "^3.0.0", "mkdirp": "~0.5.1", "move-concurrently": "^1.0.1", - "node-gyp": "^3.8.0", + "node-gyp": "^4.0.0", "nopt": "~4.0.1", "normalize-package-data": "^2.5.0", "npm-audit-report": "^1.3.2", "npm-cache-filename": "~1.0.2", "npm-install-checks": "~3.0.0", - "npm-lifecycle": "^2.1.0", + "npm-lifecycle": "^2.1.1", "npm-package-arg": "^6.1.0", "npm-packlist": "^1.4.4", "npm-pick-manifest": "^2.2.3", - "npm-registry-fetch": "^3.9.0", + 
"npm-registry-fetch": "^3.9.1", "npm-user-validate": "~1.0.0", "npmlog": "~4.1.2", "once": "~1.4.0", @@ -108,14 +109,14 @@ "path-is-inside": "~1.0.2", "promise-inflight": "~1.0.1", "qrcode-terminal": "^0.12.0", - "query-string": "^6.4.0", + "query-string": "^6.8.1", "qw": "~1.0.1", "read": "~1.0.7", "read-cmd-shim": "~1.0.1", "read-installed": "~4.0.3", "read-package-json": "^2.0.13", "read-package-tree": "^5.3.1", - "readable-stream": "^3.3.0", + "readable-stream": "^3.4.0", "readdir-scoped-modules": "^1.1.0", "request": "^2.88.0", "retry": "^0.12.0", @@ -140,7 +141,7 @@ "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "~3.0.0", "which": "^1.3.1", - "worker-farm": "^1.6.0", + "worker-farm": "^1.7.0", "write-file-atomic": "^2.4.3" }, "bundleDependencies": [ @@ -271,8 +272,8 @@ "deep-equal": "^1.0.1", "get-stream": "^4.1.0", "licensee": "^7.0.2", - "marked": "^0.6.0", - "marked-man": "^0.2.1", + "marked": "^0.6.3", + "marked-man": "^0.6.0", "npm-registry-couchapp": "^2.7.2", "npm-registry-mock": "^1.2.1", "require-inject": "^1.4.4", @@ -280,7 +281,7 @@ "standard": "^11.0.1", "tacks": "^1.3.0", "tap": "^12.7.0", - "tar-stream": "^2.0.1" + "tar-stream": "^2.1.0" }, "scripts": { "dumpconf": "env | grep npm | sort | uniq", diff --git a/test/common-tap.js b/test/common-tap.js index d54a869995451..e15d5dab2487d 100644 --- a/test/common-tap.js +++ b/test/common-tap.js @@ -7,6 +7,12 @@ var readCmdShim = require('read-cmd-shim') var isWindows = require('../lib/utils/is-windows.js') var Bluebird = require('bluebird') +// remove any git envs so that we don't mess with the main repo +// when running git subprocesses in tests +Object.keys(process.env).filter(k => /^GIT/.test(k)).forEach( + k => delete process.env[k] +) + // cheesy hackaround for test deps (read: nock) that rely on setImmediate if (!global.setImmediate || !require('timers').setImmediate) { require('timers').setImmediate = global.setImmediate = function () { diff --git a/test/tap/search.all-package-search.js b/test/tap/search.all-package-search.js index 419e4fdeed691..1115dcaf1ae3f 100644 --- a/test/tap/search.all-package-search.js +++ b/test/tap/search.all-package-search.js @@ -146,6 +146,7 @@ searches.forEach(function (search) { const query = qs.stringify({ text: search.term, size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 diff --git a/test/tap/search.js b/test/tap/search.js index 70d17e5469d47..a7e269df7abd0 100644 --- a/test/tap/search.js +++ b/test/tap/search.js @@ -35,6 +35,7 @@ test('notifies when there are no results', function (t) { const query = qs.stringify({ text: 'none', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -60,6 +61,7 @@ test('spits out a useful error when no cache nor network', function (t) { const query = qs.stringify({ text: 'foo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -91,6 +93,7 @@ test('can switch to JSON mode', function (t) { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -130,6 +133,7 @@ test('JSON mode does not notify on empty', function (t) { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -157,6 +161,7 @@ test('can switch to tab separated mode', function (t) { const query = qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5 @@ -187,6 +192,7 @@ test('tab mode does not notify on empty', function (t) { const query = 
qs.stringify({ text: 'oo', size: 20, + from: 0, quality: 0.65, popularity: 0.98, maintenance: 0.5