From 5730d17198e066077cb3ea6f78753746afc13603 Mon Sep 17 00:00:00 2001 From: Gar Date: Tue, 1 Nov 2022 13:59:35 -0700 Subject: [PATCH] deps: tar@6.1.12 --- node_modules/tar/lib/create.js | 25 ++-- node_modules/tar/lib/extract.js | 26 +++-- node_modules/tar/lib/header.js | 84 ++++++++------ node_modules/tar/lib/large-numbers.js | 39 ++++--- node_modules/tar/lib/list.js | 33 +++--- node_modules/tar/lib/mkdir.js | 64 +++++++---- node_modules/tar/lib/mode-fix.js | 12 +- node_modules/tar/lib/normalize-unicode.js | 5 +- node_modules/tar/lib/pack.js | 85 +++++++++----- node_modules/tar/lib/parse.js | 120 ++++++++++++-------- node_modules/tar/lib/path-reservations.js | 44 ++++--- node_modules/tar/lib/pax.js | 23 ++-- node_modules/tar/lib/read-entry.js | 21 ++-- node_modules/tar/lib/replace.js | 77 ++++++++----- node_modules/tar/lib/strip-absolute-path.js | 2 +- node_modules/tar/lib/unpack.js | 117 ++++++++++++------- node_modules/tar/lib/update.js | 12 +- node_modules/tar/lib/warn-mixin.js | 11 +- node_modules/tar/lib/write-entry.js | 67 +++++++---- node_modules/tar/package.json | 68 ++++++----- package-lock.json | 9 +- package.json | 2 +- 22 files changed, 589 insertions(+), 357 deletions(-) diff --git a/node_modules/tar/lib/create.js b/node_modules/tar/lib/create.js index d033640ac3b6c..9c860d4e4a764 100644 --- a/node_modules/tar/lib/create.js +++ b/node_modules/tar/lib/create.js @@ -9,24 +9,29 @@ const t = require('./list.js') const path = require('path') module.exports = (opt_, files, cb) => { - if (typeof files === 'function') + if (typeof files === 'function') { cb = files + } - if (Array.isArray(opt_)) + if (Array.isArray(opt_)) { files = opt_, opt_ = {} + } - if (!files || !Array.isArray(files) || !files.length) + if (!files || !Array.isArray(files) || !files.length) { throw new TypeError('no files or directories specified') + } files = Array.from(files) const opt = hlo(opt_) - if (opt.sync && typeof cb === 'function') + if (opt.sync && typeof cb === 'function') { throw new TypeError('callback not supported for sync tar functions') + } - if (!opt.file && typeof cb === 'function') + if (!opt.file && typeof cb === 'function') { throw new TypeError('callback only supported with file option') + } return opt.file && opt.sync ? createFileSync(opt, files) : opt.file ? 
createFile(opt, files, cb) @@ -65,13 +70,14 @@ const addFilesSync = (p, files) => { files.forEach(file => { if (file.charAt(0) === '@') { t({ - file: path.resolve(p.cwd, file.substr(1)), + file: path.resolve(p.cwd, file.slice(1)), sync: true, noResume: true, onentry: entry => p.add(entry), }) - } else + } else { p.add(file) + } }) p.end() } @@ -81,12 +87,13 @@ const addFilesAsync = (p, files) => { const file = files.shift() if (file.charAt(0) === '@') { return t({ - file: path.resolve(p.cwd, file.substr(1)), + file: path.resolve(p.cwd, file.slice(1)), noResume: true, onentry: entry => p.add(entry), }).then(_ => addFilesAsync(p, files)) - } else + } else { p.add(file) + } } p.end() } diff --git a/node_modules/tar/lib/extract.js b/node_modules/tar/lib/extract.js index 98e946ec5bfbb..54767982583f2 100644 --- a/node_modules/tar/lib/extract.js +++ b/node_modules/tar/lib/extract.js @@ -9,29 +9,35 @@ const path = require('path') const stripSlash = require('./strip-trailing-slashes.js') module.exports = (opt_, files, cb) => { - if (typeof opt_ === 'function') + if (typeof opt_ === 'function') { cb = opt_, files = null, opt_ = {} - else if (Array.isArray(opt_)) + } else if (Array.isArray(opt_)) { files = opt_, opt_ = {} + } - if (typeof files === 'function') + if (typeof files === 'function') { cb = files, files = null + } - if (!files) + if (!files) { files = [] - else + } else { files = Array.from(files) + } const opt = hlo(opt_) - if (opt.sync && typeof cb === 'function') + if (opt.sync && typeof cb === 'function') { throw new TypeError('callback not supported for sync tar functions') + } - if (!opt.file && typeof cb === 'function') + if (!opt.file && typeof cb === 'function') { throw new TypeError('callback only supported with file option') + } - if (files.length) + if (files.length) { filesFilter(opt, files) + } return opt.file && opt.sync ? extractFileSync(opt) : opt.file ? extractFile(opt, cb) @@ -87,9 +93,9 @@ const extractFile = (opt, cb) => { // This trades a zero-byte read() syscall for a stat // However, it will usually result in less memory allocation fs.stat(file, (er, stat) => { - if (er) + if (er) { reject(er) - else { + } else { const stream = new fsm.ReadStream(file, { readSize: readSize, size: stat.size, diff --git a/node_modules/tar/lib/header.js b/node_modules/tar/lib/header.js index 129504048dfab..411d5e45e879a 100644 --- a/node_modules/tar/lib/header.js +++ b/node_modules/tar/lib/header.js @@ -34,18 +34,21 @@ class Header { this.atime = null this.ctime = null - if (Buffer.isBuffer(data)) + if (Buffer.isBuffer(data)) { this.decode(data, off || 0, ex, gex) - else if (data) + } else if (data) { this.set(data) + } } decode (buf, off, ex, gex) { - if (!off) + if (!off) { off = 0 + } - if (!buf || !(buf.length >= off + 512)) + if (!buf || !(buf.length >= off + 512)) { throw new Error('need 512 bytes for header') + } this.path = decString(buf, off, 100) this.mode = decNumber(buf, off + 100, 8) @@ -62,18 +65,21 @@ class Header { // old tar versions marked dirs as a file with a trailing / this[TYPE] = decString(buf, off + 156, 1) - if (this[TYPE] === '') + if (this[TYPE] === '') { this[TYPE] = '0' - if (this[TYPE] === '0' && this.path.substr(-1) === '/') + } + if (this[TYPE] === '0' && this.path.slice(-1) === '/') { this[TYPE] = '5' + } // tar implementations sometimes incorrectly put the stat(dir).size // as the size in the tarball, even though Directory entries are // not able to have any body at all. 
In the very rare chance that // it actually DOES have a body, we weren't going to do anything with // it anyway, and it'll just be a warning about an invalid header. - if (this[TYPE] === '5') + if (this[TYPE] === '5') { this.size = 0 + } this.linkpath = decString(buf, off + 157, 100) if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') { @@ -87,23 +93,27 @@ class Header { this.path = prefix + '/' + this.path } else { const prefix = decString(buf, off + 345, 130) - if (prefix) + if (prefix) { this.path = prefix + '/' + this.path + } this.atime = decDate(buf, off + 476, 12) this.ctime = decDate(buf, off + 488, 12) } } let sum = 8 * 0x20 - for (let i = off; i < off + 148; i++) + for (let i = off; i < off + 148; i++) { sum += buf[i] + } - for (let i = off + 156; i < off + 512; i++) + for (let i = off + 156; i < off + 512; i++) { sum += buf[i] + } this.cksumValid = sum === this.cksum - if (this.cksum === null && sum === 8 * 0x20) + if (this.cksum === null && sum === 8 * 0x20) { this.nullBlock = true + } } [SLURP] (ex, global) { @@ -111,8 +121,9 @@ class Header { // we slurp in everything except for the path attribute in // a global extended header, because that's weird. if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) + !(global && k === 'path')) { this[k] = ex[k] + } } } @@ -122,11 +133,13 @@ class Header { off = 0 } - if (!off) + if (!off) { off = 0 + } - if (!(buf.length >= off + 512)) + if (!(buf.length >= off + 512)) { throw new Error('need 512 bytes for header') + } const prefixSize = this.ctime || this.atime ? 130 : 155 const split = splitPrefix(this.path || '', prefixSize) @@ -148,20 +161,22 @@ class Header { this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax - if (buf[off + 475] !== 0) + if (buf[off + 475] !== 0) { this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax - else { + } else { this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax } let sum = 8 * 0x20 - for (let i = off; i < off + 148; i++) + for (let i = off; i < off + 148; i++) { sum += buf[i] + } - for (let i = off + 156; i < off + 512; i++) + for (let i = off + 156; i < off + 512; i++) { sum += buf[i] + } this.cksum = sum encNumber(buf, off + 148, 8, this.cksum) @@ -172,8 +187,9 @@ class Header { set (data) { for (const i in data) { - if (data[i] !== null && data[i] !== undefined) + if (data[i] !== null && data[i] !== undefined) { this[i] = data[i] + } } } @@ -186,10 +202,11 @@ class Header { } set type (type) { - if (types.code.has(type)) + if (types.code.has(type)) { this[TYPE] = types.code.get(type) - else + } else { this[TYPE] = type + } } } @@ -200,25 +217,23 @@ const splitPrefix = (p, prefixSize) => { let ret const root = pathModule.parse(p).root || '.' - if (Buffer.byteLength(pp) < pathSize) + if (Buffer.byteLength(pp) < pathSize) { ret = [pp, prefix, false] - else { + } else { // first set prefix to the dir, and path to the base prefix = pathModule.dirname(pp) pp = pathModule.basename(pp) do { - // both fit! if (Buffer.byteLength(pp) <= pathSize && - Buffer.byteLength(prefix) <= prefixSize) + Buffer.byteLength(prefix) <= prefixSize) { + // both fit! 
ret = [pp, prefix, false] - - // prefix fits in prefix, but path doesn't fit in path - else if (Buffer.byteLength(pp) > pathSize && - Buffer.byteLength(prefix) <= prefixSize) - ret = [pp.substr(0, pathSize - 1), prefix, true] - - else { + } else if (Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true] + } else { // make path take a bit from prefix pp = pathModule.join(pathModule.basename(prefix), pp) prefix = pathModule.dirname(prefix) @@ -226,8 +241,9 @@ const splitPrefix = (p, prefixSize) => { } while (prefix !== root && !ret) // at this point, found no resolution, just truncate - if (!ret) - ret = [p.substr(0, pathSize - 1), '', true] + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true] + } } return ret } diff --git a/node_modules/tar/lib/large-numbers.js b/node_modules/tar/lib/large-numbers.js index dd6f690b9a8d9..b11e72d996fde 100644 --- a/node_modules/tar/lib/large-numbers.js +++ b/node_modules/tar/lib/large-numbers.js @@ -3,14 +3,15 @@ // 0xff for negative, and 0x80 for positive. const encode = (num, buf) => { - if (!Number.isSafeInteger(num)) - // The number is so large that javascript cannot represent it with integer - // precision. + if (!Number.isSafeInteger(num)) { + // The number is so large that javascript cannot represent it with integer + // precision. throw Error('cannot encode number outside of javascript safe integer range') - else if (num < 0) + } else if (num < 0) { encodeNegative(num, buf) - else + } else { encodePositive(num, buf) + } return buf } @@ -30,11 +31,11 @@ const encodeNegative = (num, buf) => { for (var i = buf.length; i > 1; i--) { var byte = num & 0xff num = Math.floor(num / 0x100) - if (flipped) + if (flipped) { buf[i - 1] = onesComp(byte) - else if (byte === 0) + } else if (byte === 0) { buf[i - 1] = 0 - else { + } else { flipped = true buf[i - 1] = twosComp(byte) } @@ -46,13 +47,15 @@ const parse = (buf) => { const value = pre === 0x80 ? pos(buf.slice(1, buf.length)) : pre === 0xff ? twos(buf) : null - if (value === null) + if (value === null) { throw Error('invalid base256 encoding') + } - if (!Number.isSafeInteger(value)) - // The number is so large that javascript cannot represent it with integer - // precision. + if (!Number.isSafeInteger(value)) { + // The number is so large that javascript cannot represent it with integer + // precision. 
throw Error('parsed number outside of javascript safe integer range') + } return value } @@ -64,16 +67,17 @@ const twos = (buf) => { for (var i = len - 1; i > -1; i--) { var byte = buf[i] var f - if (flipped) + if (flipped) { f = onesComp(byte) - else if (byte === 0) + } else if (byte === 0) { f = byte - else { + } else { flipped = true f = twosComp(byte) } - if (f !== 0) + if (f !== 0) { sum -= f * Math.pow(256, len - i - 1) + } } return sum } @@ -83,8 +87,9 @@ const pos = (buf) => { var sum = 0 for (var i = len - 1; i > -1; i--) { var byte = buf[i] - if (byte !== 0) + if (byte !== 0) { sum += byte * Math.pow(256, len - i - 1) + } } return sum } diff --git a/node_modules/tar/lib/list.js b/node_modules/tar/lib/list.js index a0c1cf2fbc7ea..f2358c25410b5 100644 --- a/node_modules/tar/lib/list.js +++ b/node_modules/tar/lib/list.js @@ -12,32 +12,39 @@ const path = require('path') const stripSlash = require('./strip-trailing-slashes.js') module.exports = (opt_, files, cb) => { - if (typeof opt_ === 'function') + if (typeof opt_ === 'function') { cb = opt_, files = null, opt_ = {} - else if (Array.isArray(opt_)) + } else if (Array.isArray(opt_)) { files = opt_, opt_ = {} + } - if (typeof files === 'function') + if (typeof files === 'function') { cb = files, files = null + } - if (!files) + if (!files) { files = [] - else + } else { files = Array.from(files) + } const opt = hlo(opt_) - if (opt.sync && typeof cb === 'function') + if (opt.sync && typeof cb === 'function') { throw new TypeError('callback not supported for sync tar functions') + } - if (!opt.file && typeof cb === 'function') + if (!opt.file && typeof cb === 'function') { throw new TypeError('callback only supported with file option') + } - if (files.length) + if (files.length) { filesFilter(opt, files) + } - if (!opt.noResume) + if (!opt.noResume) { onentryFunction(opt) + } return opt.file && opt.sync ? listFileSync(opt) : opt.file ? 
listFile(opt, cb) @@ -81,9 +88,9 @@ const listFileSync = opt => { try { const stat = fs.statSync(file) const readSize = opt.maxReadSize || 16 * 1024 * 1024 - if (stat.size < readSize) + if (stat.size < readSize) { p.end(fs.readFileSync(file)) - else { + } else { let pos = 0 const buf = Buffer.allocUnsafe(readSize) fd = fs.openSync(file, 'r') @@ -114,9 +121,9 @@ const listFile = (opt, cb) => { parse.on('end', resolve) fs.stat(file, (er, stat) => { - if (er) + if (er) { reject(er) - else { + } else { const stream = new fsm.ReadStream(file, { readSize: readSize, size: stat.size, diff --git a/node_modules/tar/lib/mkdir.js b/node_modules/tar/lib/mkdir.js index a0719e6c36ed3..8ee8de7852d12 100644 --- a/node_modules/tar/lib/mkdir.js +++ b/node_modules/tar/lib/mkdir.js @@ -39,8 +39,9 @@ const cSet = (cache, key, val) => cache.set(normPath(key), val) const checkCwd = (dir, cb) => { fs.stat(dir, (er, st) => { - if (er || !st.isDirectory()) + if (er || !st.isDirectory()) { er = new CwdError(dir, er && er.code || 'ENOTDIR') + } cb(er) }) } @@ -66,27 +67,31 @@ module.exports = (dir, opt, cb) => { const cwd = normPath(opt.cwd) const done = (er, created) => { - if (er) + if (er) { cb(er) - else { + } else { cSet(cache, dir, true) - if (created && doChown) + if (created && doChown) { chownr(created, uid, gid, er => done(er)) - else if (needChmod) + } else if (needChmod) { fs.chmod(dir, mode, cb) - else + } else { cb() + } } } - if (cache && cGet(cache, dir) === true) + if (cache && cGet(cache, dir) === true) { return done() + } - if (dir === cwd) + if (dir === cwd) { return checkCwd(dir, done) + } - if (preserve) - return mkdirp(dir, {mode}).then(made => done(null, made), done) + if (preserve) { + return mkdirp(dir, { mode }).then(made => done(null, made), done) + } const sub = normPath(path.relative(cwd, dir)) const parts = sub.split('/') @@ -94,12 +99,14 @@ module.exports = (dir, opt, cb) => { } const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { - if (!parts.length) + if (!parts.length) { return cb(null, created) + } const p = parts.shift() const part = normPath(path.resolve(base + '/' + p)) - if (cGet(cache, part)) + if (cGet(cache, part)) { return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) } @@ -109,18 +116,20 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { if (statEr) { statEr.path = statEr.path && normPath(statEr.path) cb(statEr) - } else if (st.isDirectory()) + } else if (st.isDirectory()) { mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) - else if (unlink) { + } else if (unlink) { fs.unlink(part, er => { - if (er) + if (er) { return cb(er) + } fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) }) - } else if (st.isSymbolicLink()) + } else if (st.isSymbolicLink()) { return cb(new SymlinkError(part, part + '/' + parts.join('/'))) - else + } else { cb(er) + } }) } else { created = created || part @@ -136,8 +145,9 @@ const checkCwdSync = dir => { } catch (er) { code = er.code } finally { - if (!ok) + if (!ok) { throw new CwdError(dir, code) + } } } @@ -162,22 +172,26 @@ module.exports.sync = (dir, opt) => { const done = (created) => { cSet(cache, dir, true) - if (created && doChown) + if (created && doChown) { chownr.sync(created, uid, gid) - if (needChmod) + } + if (needChmod) { fs.chmodSync(dir, mode) + } } - if (cache && cGet(cache, dir) === true) + if (cache && cGet(cache, dir) === true) { return 
done() + } if (dir === cwd) { checkCwdSync(cwd) return done() } - if (preserve) + if (preserve) { return done(mkdirp.sync(dir, mode)) + } const sub = normPath(path.relative(cwd, dir)) const parts = sub.split('/') @@ -186,8 +200,9 @@ module.exports.sync = (dir, opt) => { p && (part += '/' + p); p = parts.shift()) { part = normPath(path.resolve(part)) - if (cGet(cache, part)) + if (cGet(cache, part)) { continue + } try { fs.mkdirSync(part, mode) @@ -204,8 +219,9 @@ module.exports.sync = (dir, opt) => { created = created || part cSet(cache, part, true) continue - } else if (st.isSymbolicLink()) + } else if (st.isSymbolicLink()) { return new SymlinkError(part, part + '/' + parts.join('/')) + } } } diff --git a/node_modules/tar/lib/mode-fix.js b/node_modules/tar/lib/mode-fix.js index 6a045ffcaec5b..42f1d6e657b1a 100644 --- a/node_modules/tar/lib/mode-fix.js +++ b/node_modules/tar/lib/mode-fix.js @@ -7,17 +7,21 @@ module.exports = (mode, isDir, portable) => { // (as some linux distros do), then we'll write the // archive with 0o644 instead. Also, don't ever create // a file that is not readable/writable by the owner. - if (portable) + if (portable) { mode = (mode | 0o600) & ~0o22 + } // if dirs are readable, then they should be listable if (isDir) { - if (mode & 0o400) + if (mode & 0o400) { mode |= 0o100 - if (mode & 0o40) + } + if (mode & 0o40) { mode |= 0o10 - if (mode & 0o4) + } + if (mode & 0o4) { mode |= 0o1 + } } return mode } diff --git a/node_modules/tar/lib/normalize-unicode.js b/node_modules/tar/lib/normalize-unicode.js index 4aeb1d50db9e1..43dc406ecedb9 100644 --- a/node_modules/tar/lib/normalize-unicode.js +++ b/node_modules/tar/lib/normalize-unicode.js @@ -3,9 +3,10 @@ // within npm install on large package trees. // Do not edit without careful benchmarking. 
const normalizeCache = Object.create(null) -const {hasOwnProperty} = Object.prototype +const { hasOwnProperty } = Object.prototype module.exports = s => { - if (!hasOwnProperty.call(normalizeCache, s)) + if (!hasOwnProperty.call(normalizeCache, s)) { normalizeCache[s] = s.normalize('NFKD') + } return normalizeCache[s] } diff --git a/node_modules/tar/lib/pack.js b/node_modules/tar/lib/pack.js index 9522c10bfe4a4..a3f4ff220abbb 100644 --- a/node_modules/tar/lib/pack.js +++ b/node_modules/tar/lib/pack.js @@ -73,23 +73,27 @@ const Pack = warner(class Pack extends MiniPass { this.readdirCache = opt.readdirCache || new Map() this[WRITEENTRYCLASS] = WriteEntry - if (typeof opt.onwarn === 'function') + if (typeof opt.onwarn === 'function') { this.on('warn', opt.onwarn) + } this.portable = !!opt.portable this.zip = null if (opt.gzip) { - if (typeof opt.gzip !== 'object') + if (typeof opt.gzip !== 'object') { opt.gzip = {} - if (this.portable) + } + if (this.portable) { opt.gzip.portable = true + } this.zip = new zlib.Gzip(opt.gzip) this.zip.on('data', chunk => super.write(chunk)) this.zip.on('end', _ => super.end()) this.zip.on('drain', _ => this[ONDRAIN]()) this.on('resume', _ => this.zip.resume()) - } else + } else { this.on('drain', this[ONDRAIN]) + } this.noDirRecurse = !!opt.noDirRecurse this.follow = !!opt.follow @@ -115,30 +119,33 @@ const Pack = warner(class Pack extends MiniPass { } end (path) { - if (path) + if (path) { this.write(path) + } this[ENDED] = true this[PROCESS]() return this } write (path) { - if (this[ENDED]) + if (this[ENDED]) { throw new Error('write after end') + } - if (path instanceof ReadEntry) + if (path instanceof ReadEntry) { this[ADDTARENTRY](path) - else + } else { this[ADDFSENTRY](path) + } return this.flowing } [ADDTARENTRY] (p) { const absolute = normPath(path.resolve(this.cwd, p.path)) // in this case, we don't have to wait for the stat - if (!this.filter(p.path, p)) + if (!this.filter(p.path, p)) { p.resume() - else { + } else { const job = new PackJob(p.path, absolute, false) job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) job.entry.on('end', _ => this[JOBDONE](job)) @@ -162,10 +169,11 @@ const Pack = warner(class Pack extends MiniPass { fs[stat](job.absolute, (er, stat) => { job.pending = false this[JOBS] -= 1 - if (er) + if (er) { this.emit('error', er) - else + } else { this[ONSTAT](job, stat) + } }) } @@ -174,8 +182,9 @@ const Pack = warner(class Pack extends MiniPass { job.stat = stat // now we have the stat, we can filter it. 
- if (!this.filter(job.path, stat)) + if (!this.filter(job.path, stat)) { job.ignore = true + } this[PROCESS]() } @@ -186,8 +195,9 @@ const Pack = warner(class Pack extends MiniPass { fs.readdir(job.absolute, (er, entries) => { job.pending = false this[JOBS] -= 1 - if (er) + if (er) { return this.emit('error', er) + } this[ONREADDIR](job, entries) }) } @@ -199,8 +209,9 @@ const Pack = warner(class Pack extends MiniPass { } [PROCESS] () { - if (this[PROCESSING]) + if (this[PROCESSING]) { return + } this[PROCESSING] = true for (let w = this[QUEUE].head; @@ -217,9 +228,9 @@ const Pack = warner(class Pack extends MiniPass { this[PROCESSING] = false if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { - if (this.zip) + if (this.zip) { this.zip.end(EOF) - else { + } else { super.write(EOF) super.end() } @@ -237,35 +248,42 @@ const Pack = warner(class Pack extends MiniPass { } [PROCESSJOB] (job) { - if (job.pending) + if (job.pending) { return + } if (job.entry) { - if (job === this[CURRENT] && !job.piped) + if (job === this[CURRENT] && !job.piped) { this[PIPE](job) + } return } if (!job.stat) { - if (this.statCache.has(job.absolute)) + if (this.statCache.has(job.absolute)) { this[ONSTAT](job, this.statCache.get(job.absolute)) - else + } else { this[STAT](job) + } } - if (!job.stat) + if (!job.stat) { return + } // filtered out! - if (job.ignore) + if (job.ignore) { return + } if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { - if (this.readdirCache.has(job.absolute)) + if (this.readdirCache.has(job.absolute)) { this[ONREADDIR](job, this.readdirCache.get(job.absolute)) - else + } else { this[READDIR](job) - if (!job.readdir) + } + if (!job.readdir) { return + } } // we know it doesn't have an entry, because that got checked above @@ -275,8 +293,9 @@ const Pack = warner(class Pack extends MiniPass { return } - if (job === this[CURRENT] && !job.piped) + if (job === this[CURRENT] && !job.piped) { this[PIPE](job) + } } [ENTRYOPT] (job) { @@ -309,8 +328,9 @@ const Pack = warner(class Pack extends MiniPass { } [ONDRAIN] () { - if (this[CURRENT] && this[CURRENT].entry) + if (this[CURRENT] && this[CURRENT].entry) { this[CURRENT].entry.resume() + } } // like .pipe() but using super, because our write() is special @@ -330,20 +350,23 @@ const Pack = warner(class Pack extends MiniPass { if (zip) { source.on('data', chunk => { - if (!zip.write(chunk)) + if (!zip.write(chunk)) { source.pause() + } }) } else { source.on('data', chunk => { - if (!super.write(chunk)) + if (!super.write(chunk)) { source.pause() + } }) } } pause () { - if (this.zip) + if (this.zip) { this.zip.pause() + } return super.pause() } }) diff --git a/node_modules/tar/lib/parse.js b/node_modules/tar/lib/parse.js index b1b4e7e47577c..4b85915cbe01e 100644 --- a/node_modules/tar/lib/parse.js +++ b/node_modules/tar/lib/parse.js @@ -28,6 +28,7 @@ const maxMetaEntrySize = 1024 * 1024 const Entry = require('./read-entry.js') const Pax = require('./pax.js') const zlib = require('minizlib') +const { nextTick } = require('process') const gzipHeader = Buffer.from([0x1f, 0x8b]) const STATE = Symbol('state') @@ -59,6 +60,7 @@ const DONE = Symbol('onDone') const SAW_VALID_ENTRY = Symbol('sawValidEntry') const SAW_NULL_BLOCK = Symbol('sawNullBlock') const SAW_EOF = Symbol('sawEOF') +const CLOSESTREAM = Symbol('closeStream') const noop = _ => true @@ -82,14 +84,13 @@ module.exports = warner(class Parser extends EE { } }) - if (opt.ondone) + if (opt.ondone) { this.on(DONE, opt.ondone) - else { + } else { this.on(DONE, _ => { 
this.emit('prefinish') this.emit('finish') this.emit('end') - this.emit('close') }) } @@ -114,15 +115,21 @@ module.exports = warner(class Parser extends EE { this[ABORTED] = false this[SAW_NULL_BLOCK] = false this[SAW_EOF] = false - if (typeof opt.onwarn === 'function') + + this.on('end', () => this[CLOSESTREAM]()) + + if (typeof opt.onwarn === 'function') { this.on('warn', opt.onwarn) - if (typeof opt.onentry === 'function') + } + if (typeof opt.onentry === 'function') { this.on('entry', opt.onentry) + } } [CONSUMEHEADER] (chunk, position) { - if (this[SAW_VALID_ENTRY] === null) + if (this[SAW_VALID_ENTRY] === null) { this[SAW_VALID_ENTRY] = false + } let header try { header = new Header(chunk, position, this[EX], this[GEX]) @@ -134,8 +141,9 @@ module.exports = warner(class Parser extends EE { if (this[SAW_NULL_BLOCK]) { this[SAW_EOF] = true // ending an archive with no entries. pointless, but legal. - if (this[STATE] === 'begin') + if (this[STATE] === 'begin') { this[STATE] = 'header' + } this[EMIT]('eof') } else { this[SAW_NULL_BLOCK] = true @@ -143,17 +151,17 @@ module.exports = warner(class Parser extends EE { } } else { this[SAW_NULL_BLOCK] = false - if (!header.cksumValid) - this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header}) - else if (!header.path) - this.warn('TAR_ENTRY_INVALID', 'path is required', {header}) - else { + if (!header.cksumValid) { + this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }) + } else if (!header.path) { + this.warn('TAR_ENTRY_INVALID', 'path is required', { header }) + } else { const type = header.type - if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) - this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header}) - else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) - this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header}) - else { + if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header }) + } else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header }) + } else { const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX]) // we do this for meta & ignored entries as well, because they @@ -162,12 +170,14 @@ module.exports = warner(class Parser extends EE { if (entry.remain) { // this might be the one! const onend = () => { - if (!entry.invalid) + if (!entry.invalid) { this[SAW_VALID_ENTRY] = true + } } entry.on('end', onend) - } else + } else { this[SAW_VALID_ENTRY] = true + } } if (entry.meta) { @@ -191,9 +201,9 @@ module.exports = warner(class Parser extends EE { this[STATE] = entry.remain ? 
'ignore' : 'header' entry.resume() } else { - if (entry.remain) + if (entry.remain) { this[STATE] = 'body' - else { + } else { this[STATE] = 'header' entry.end() } @@ -201,8 +211,9 @@ module.exports = warner(class Parser extends EE { if (!this[READENTRY]) { this[QUEUE].push(entry) this[NEXTENTRY]() - } else + } else { this[QUEUE].push(entry) + } } } } @@ -210,15 +221,19 @@ module.exports = warner(class Parser extends EE { } } + [CLOSESTREAM] () { + nextTick(() => this.emit('close')) + } + [PROCESSENTRY] (entry) { let go = true if (!entry) { this[READENTRY] = null go = false - } else if (Array.isArray(entry)) + } else if (Array.isArray(entry)) { this.emit.apply(this, entry) - else { + } else { this[READENTRY] = entry this.emit('entry', entry) if (!entry.emittedEnd) { @@ -244,10 +259,12 @@ module.exports = warner(class Parser extends EE { const re = this[READENTRY] const drainNow = !re || re.flowing || re.size === re.remain if (drainNow) { - if (!this[WRITING]) + if (!this[WRITING]) { this.emit('drain') - } else + } + } else { re.once('drain', _ => this.emit('drain')) + } } } @@ -274,17 +291,19 @@ module.exports = warner(class Parser extends EE { const ret = this[CONSUMEBODY](chunk, position) // if we finished, then the entry is reset - if (!this[WRITEENTRY]) + if (!this[WRITEENTRY]) { this[EMITMETA](entry) + } return ret } [EMIT] (ev, data, extra) { - if (!this[QUEUE].length && !this[READENTRY]) + if (!this[QUEUE].length && !this[READENTRY]) { this.emit(ev, data, extra) - else + } else { this[QUEUE].push([ev, data, extra]) + } } [EMITMETA] (entry) { @@ -323,8 +342,9 @@ module.exports = warner(class Parser extends EE { } write (chunk) { - if (this[ABORTED]) + if (this[ABORTED]) { return + } // first write, might be gzipped if (this[UNZIP] === null && chunk) { @@ -337,8 +357,9 @@ module.exports = warner(class Parser extends EE { return true } for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) { - if (chunk[i] !== gzipHeader[i]) + if (chunk[i] !== gzipHeader[i]) { this[UNZIP] = false + } } if (this[UNZIP] === null) { const ended = this[ENDED] @@ -358,10 +379,11 @@ module.exports = warner(class Parser extends EE { } this[WRITING] = true - if (this[UNZIP]) + if (this[UNZIP]) { this[UNZIP].write(chunk) - else + } else { this[CONSUMECHUNK](chunk) + } this[WRITING] = false // return false if there's a queue, or if the current entry isn't flowing @@ -371,15 +393,17 @@ module.exports = warner(class Parser extends EE { true // if we have no queue, then that means a clogged READENTRY - if (!ret && !this[QUEUE].length) + if (!ret && !this[QUEUE].length) { this[READENTRY].once('drain', _ => this.emit('drain')) + } return ret } [BUFFERCONCAT] (c) { - if (c && !this[ABORTED]) + if (c && !this[ABORTED]) { this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c + } } [MAYBEEND] () { @@ -393,9 +417,10 @@ module.exports = warner(class Parser extends EE { // truncated, likely a damaged file const have = this[BUFFER] ? 
this[BUFFER].length : 0 this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${ - entry.blockRemain} more bytes, only ${have} available)`, {entry}) - if (this[BUFFER]) + entry.blockRemain} more bytes, only ${have} available)`, { entry }) + if (this[BUFFER]) { entry.write(this[BUFFER]) + } entry.end() } this[EMIT](DONE) @@ -403,19 +428,20 @@ module.exports = warner(class Parser extends EE { } [CONSUMECHUNK] (chunk) { - if (this[CONSUMING]) + if (this[CONSUMING]) { this[BUFFERCONCAT](chunk) - else if (!chunk && !this[BUFFER]) + } else if (!chunk && !this[BUFFER]) { this[MAYBEEND]() - else { + } else { this[CONSUMING] = true if (this[BUFFER]) { this[BUFFERCONCAT](chunk) const c = this[BUFFER] this[BUFFER] = null this[CONSUMECHUNKSUB](c) - } else + } else { this[CONSUMECHUNKSUB](chunk) + } while (this[BUFFER] && this[BUFFER].length >= 512 && @@ -428,8 +454,9 @@ module.exports = warner(class Parser extends EE { this[CONSUMING] = false } - if (!this[BUFFER] || this[ENDED]) + if (!this[BUFFER] || this[ENDED]) { this[MAYBEEND]() + } } [CONSUMECHUNKSUB] (chunk) { @@ -461,18 +488,19 @@ module.exports = warner(class Parser extends EE { } if (position < length) { - if (this[BUFFER]) + if (this[BUFFER]) { this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]]) - else + } else { this[BUFFER] = chunk.slice(position) + } } } end (chunk) { if (!this[ABORTED]) { - if (this[UNZIP]) + if (this[UNZIP]) { this[UNZIP].end(chunk) - else { + } else { this[ENDED] = true this.write(chunk) } diff --git a/node_modules/tar/lib/path-reservations.js b/node_modules/tar/lib/path-reservations.js index 8183c45f8535c..ef380cab685e3 100644 --- a/node_modules/tar/lib/path-reservations.js +++ b/node_modules/tar/lib/path-reservations.js @@ -27,8 +27,9 @@ module.exports = () => { // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] const getDirs = path => { const dirs = path.split('/').slice(0, -1).reduce((set, path) => { - if (set.length) + if (set.length) { path = join(set[set.length - 1], path) + } set.push(path || '/') return set }, []) @@ -43,8 +44,9 @@ module.exports = () => { const getQueues = fn => { const res = reservations.get(fn) /* istanbul ignore if - unpossible */ - if (!res) + if (!res) { throw new Error('function does not have any path reservations') + } return { paths: res.paths.map(path => queues.get(path)), dirs: [...res.dirs].map(path => queues.get(path)), @@ -54,23 +56,25 @@ module.exports = () => { // check if fn is first in line for all its paths, and is // included in the first set for all its dir queues const check = fn => { - const {paths, dirs} = getQueues(fn) + const { paths, dirs } = getQueues(fn) return paths.every(q => q[0] === fn) && dirs.every(q => q[0] instanceof Set && q[0].has(fn)) } // run the function if it's first in line and not already running const run = fn => { - if (running.has(fn) || !check(fn)) + if (running.has(fn) || !check(fn)) { return false + } running.add(fn) fn(() => clear(fn)) return true } const clear = fn => { - if (!running.has(fn)) + if (!running.has(fn)) { return false + } const { paths, dirs } = reservations.get(fn) const next = new Set() @@ -78,29 +82,31 @@ module.exports = () => { paths.forEach(path => { const q = queues.get(path) assert.equal(q[0], fn) - if (q.length === 1) + if (q.length === 1) { queues.delete(path) - else { + } else { q.shift() - if (typeof q[0] === 'function') + if (typeof q[0] === 'function') { next.add(q[0]) - else + } else { q[0].forEach(fn => next.add(fn)) + } } }) dirs.forEach(dir => { const q = queues.get(dir) assert(q[0] 
instanceof Set) - if (q[0].size === 1 && q.length === 1) + if (q[0].size === 1 && q.length === 1) { queues.delete(dir) - else if (q[0].size === 1) { + } else if (q[0].size === 1) { q.shift() // must be a function or else the Set would've been reused next.add(q[0]) - } else + } else { q[0].delete(fn) + } }) running.delete(fn) @@ -123,22 +129,24 @@ module.exports = () => { const dirs = new Set( paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)) ) - reservations.set(fn, {dirs, paths}) + reservations.set(fn, { dirs, paths }) paths.forEach(path => { const q = queues.get(path) - if (!q) + if (!q) { queues.set(path, [fn]) - else + } else { q.push(fn) + } }) dirs.forEach(dir => { const q = queues.get(dir) - if (!q) + if (!q) { queues.set(dir, [new Set([fn])]) - else if (q[q.length - 1] instanceof Set) + } else if (q[q.length - 1] instanceof Set) { q[q.length - 1].add(fn) - else + } else { q.push(new Set([fn])) + } }) return run(fn) diff --git a/node_modules/tar/lib/pax.js b/node_modules/tar/lib/pax.js index 7768c7b454f76..4a7ca85386e83 100644 --- a/node_modules/tar/lib/pax.js +++ b/node_modules/tar/lib/pax.js @@ -24,8 +24,9 @@ class Pax { encode () { const body = this.encodeBody() - if (body === '') + if (body === '') { return null + } const bodyLen = Buffer.byteLength(body) // round up to 512 bytes @@ -34,8 +35,9 @@ class Pax { const buf = Buffer.allocUnsafe(bufLen) // 0-fill the header section, it might not hit every field - for (let i = 0; i < 512; i++) + for (let i = 0; i < 512; i++) { buf[i] = 0 + } new Header({ // XXX split the path @@ -60,8 +62,9 @@ class Pax { buf.write(body, 512, bodyLen, 'utf8') // null pad after the body - for (let i = bodyLen + 512; i < buf.length; i++) + for (let i = bodyLen + 512; i < buf.length; i++) { buf[i] = 0 + } return buf } @@ -87,8 +90,9 @@ class Pax { } encodeField (field) { - if (this[field] === null || this[field] === undefined) + if (this[field] === null || this[field] === undefined) { return '' + } const v = this[field] instanceof Date ? this[field].getTime() / 1000 : this[field] const s = ' ' + @@ -100,8 +104,9 @@ class Pax { // so if it's 9 characters, then adding 1 for the 9 makes it 10 // which makes it 11 chars. let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1 - if (byteLen + digits >= Math.pow(10, digits)) + if (byteLen + digits >= Math.pow(10, digits)) { digits += 1 + } const len = digits + byteLen return len + s } @@ -123,14 +128,16 @@ const parseKVLine = (set, line) => { // XXX Values with \n in them will fail this. // Refactor to not be a naive line-by-line parse. 
- if (n !== Buffer.byteLength(line) + 1) + if (n !== Buffer.byteLength(line) + 1) { return set + } - line = line.substr((n + ' ').length) + line = line.slice((n + ' ').length) const kv = line.split('=') const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1') - if (!k) + if (!k) { return set + } const v = kv.join('=') set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) diff --git a/node_modules/tar/lib/read-entry.js b/node_modules/tar/lib/read-entry.js index 183a6050ba0d2..7f44bebfc10bd 100644 --- a/node_modules/tar/lib/read-entry.js +++ b/node_modules/tar/lib/read-entry.js @@ -49,8 +49,9 @@ module.exports = class ReadEntry extends MiniPass { this.path = normPath(header.path) this.mode = header.mode - if (this.mode) + if (this.mode) { this.mode = this.mode & 0o7777 + } this.uid = header.uid this.gid = header.gid this.uname = header.uname @@ -63,26 +64,31 @@ module.exports = class ReadEntry extends MiniPass { this.uname = header.uname this.gname = header.gname - if (ex) + if (ex) { this[SLURP](ex) - if (gex) + } + if (gex) { this[SLURP](gex, true) + } } write (data) { const writeLen = data.length - if (writeLen > this.blockRemain) + if (writeLen > this.blockRemain) { throw new Error('writing more to entry than is appropriate') + } const r = this.remain const br = this.blockRemain this.remain = Math.max(0, r - writeLen) this.blockRemain = Math.max(0, br - writeLen) - if (this.ignore) + if (this.ignore) { return true + } - if (r >= writeLen) + if (r >= writeLen) { return super.write(data) + } // r < writeLen return super.write(data.slice(0, r)) @@ -93,8 +99,9 @@ module.exports = class ReadEntry extends MiniPass { // we slurp in everything except for the path attribute in // a global extended header, because that's weird. if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) + !(global && k === 'path')) { this[k] = k === 'path' || k === 'linkpath' ? 
normPath(ex[k]) : ex[k] + } } } } diff --git a/node_modules/tar/lib/replace.js b/node_modules/tar/lib/replace.js index 1374f3f29c619..c6e619be6f032 100644 --- a/node_modules/tar/lib/replace.js +++ b/node_modules/tar/lib/replace.js @@ -19,14 +19,17 @@ const Header = require('./header.js') module.exports = (opt_, files, cb) => { const opt = hlo(opt_) - if (!opt.file) + if (!opt.file) { throw new TypeError('file is required') + } - if (opt.gzip) + if (opt.gzip) { throw new TypeError('cannot append to compressed archives') + } - if (!files || !Array.isArray(files) || !files.length) + if (!files || !Array.isArray(files) || !files.length) { throw new TypeError('no files or directories specified') + } files = Array.from(files) @@ -45,10 +48,11 @@ const replaceSync = (opt, files) => { try { fd = fs.openSync(opt.file, 'r+') } catch (er) { - if (er.code === 'ENOENT') + if (er.code === 'ENOENT') { fd = fs.openSync(opt.file, 'w+') - else + } else { throw er + } } const st = fs.fstatSync(fd) @@ -60,24 +64,29 @@ const replaceSync = (opt, files) => { fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos ) - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) + if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { throw new Error('cannot append to compressed archives') + } - if (!bytes) + if (!bytes) { break POSITION + } } const h = new Header(headBuf) - if (!h.cksumValid) + if (!h.cksumValid) { break + } const entryBlockSize = 512 * Math.ceil(h.size / 512) - if (position + entryBlockSize + 512 > st.size) + if (position + entryBlockSize + 512 > st.size) { break + } // the 512 for the header we just parsed will be added as well // also jump ahead all the blocks for the body position += entryBlockSize - if (opt.mtimeCache) + if (opt.mtimeCache) { opt.mtimeCache.set(h.path, h.mtime) + } } threw = false @@ -106,21 +115,24 @@ const replace = (opt, files, cb) => { const getPos = (fd, size, cb_) => { const cb = (er, pos) => { - if (er) + if (er) { fs.close(fd, _ => cb_(er)) - else + } else { cb_(null, pos) + } } let position = 0 - if (size === 0) + if (size === 0) { return cb(null, 0) + } let bufPos = 0 const headBuf = Buffer.alloc(512) const onread = (er, bytes) => { - if (er) + if (er) { return cb(er) + } bufPos += bytes if (bufPos < 512 && bytes) { return fs.read( @@ -129,27 +141,33 @@ const replace = (opt, files, cb) => { ) } - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) + if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { return cb(new Error('cannot append to compressed archives')) + } // truncated header - if (bufPos < 512) + if (bufPos < 512) { return cb(null, position) + } const h = new Header(headBuf) - if (!h.cksumValid) + if (!h.cksumValid) { return cb(null, position) + } const entryBlockSize = 512 * Math.ceil(h.size / 512) - if (position + entryBlockSize + 512 > size) + if (position + entryBlockSize + 512 > size) { return cb(null, position) + } position += entryBlockSize + 512 - if (position >= size) + if (position >= size) { return cb(null, position) + } - if (opt.mtimeCache) + if (opt.mtimeCache) { opt.mtimeCache.set(h.path, h.mtime) + } bufPos = 0 fs.read(fd, headBuf, 0, 512, position, onread) } @@ -165,16 +183,19 @@ const replace = (opt, files, cb) => { return fs.open(opt.file, flag, onopen) } - if (er) + if (er) { return reject(er) + } fs.fstat(fd, (er, st) => { - if (er) + if (er) { return fs.close(fd, () => reject(er)) + } getPos(fd, st.size, (er, position) => { - if (er) + if (er) { return reject(er) + } const stream 
= new fsm.WriteStream(opt.file, { fd: fd, start: position, @@ -196,13 +217,14 @@ const addFilesSync = (p, files) => { files.forEach(file => { if (file.charAt(0) === '@') { t({ - file: path.resolve(p.cwd, file.substr(1)), + file: path.resolve(p.cwd, file.slice(1)), sync: true, noResume: true, onentry: entry => p.add(entry), }) - } else + } else { p.add(file) + } }) p.end() } @@ -212,12 +234,13 @@ const addFilesAsync = (p, files) => { const file = files.shift() if (file.charAt(0) === '@') { return t({ - file: path.resolve(p.cwd, file.substr(1)), + file: path.resolve(p.cwd, file.slice(1)), noResume: true, onentry: entry => p.add(entry), }).then(_ => addFilesAsync(p, files)) - } else + } else { p.add(file) + } } p.end() } diff --git a/node_modules/tar/lib/strip-absolute-path.js b/node_modules/tar/lib/strip-absolute-path.js index 1aa2d2aec5030..185e2dead3929 100644 --- a/node_modules/tar/lib/strip-absolute-path.js +++ b/node_modules/tar/lib/strip-absolute-path.js @@ -16,7 +16,7 @@ module.exports = path => { // but strip the //?/C:/ off of //?/C:/path const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/' : parsed.root - path = path.substr(root.length) + path = path.slice(root.length) r += root parsed = parse(path) } diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js index 7d39dc0f7e79f..e341ad0c7239e 100644 --- a/node_modules/tar/lib/unpack.js +++ b/node_modules/tar/lib/unpack.js @@ -66,21 +66,24 @@ const isWindows = platform === 'win32' // See: https://github.com/npm/node-tar/issues/183 /* istanbul ignore next */ const unlinkFile = (path, cb) => { - if (!isWindows) + if (!isWindows) { return fs.unlink(path, cb) + } const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') fs.rename(path, name, er => { - if (er) + if (er) { return cb(er) + } fs.unlink(name, cb) }) } /* istanbul ignore next */ const unlinkFileSync = path => { - if (!isWindows) + if (!isWindows) { return fs.unlinkSync(path) + } const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') fs.renameSync(path, name) @@ -109,20 +112,23 @@ const pruneCache = (cache, abs) => { abs = cacheKeyNormalize(abs) for (const path of cache.keys()) { const pnorm = cacheKeyNormalize(path) - if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) + if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { cache.delete(path) + } } } const dropCache = cache => { - for (const key of cache.keys()) + for (const key of cache.keys()) { cache.delete(key) + } } class Unpack extends Parser { constructor (opt) { - if (!opt) + if (!opt) { opt = {} + } opt.ondone = _ => { this[ENDED] = true @@ -147,8 +153,9 @@ class Unpack extends Parser { if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { // need both or neither - if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') + if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') { throw new TypeError('cannot set owner without number uid and gid') + } if (opt.preserveOwner) { throw new TypeError( 'cannot preserve owner in archive and also set owner explicitly') @@ -163,10 +170,11 @@ class Unpack extends Parser { } // default true for root - if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') + if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') { this.preserveOwner = process.getuid && process.getuid() === 0 - else + } else { this.preserveOwner = !!opt.preserveOwner + } this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ? 
process.getuid() : null @@ -215,8 +223,9 @@ class Unpack extends Parser { // when extracting. Mark those errors as unrecoverable, because // the Unpack contract cannot be met. warn (code, msg, data = {}) { - if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') + if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { data.recoverable = false + } return super.warn(code, msg, data) } @@ -225,23 +234,24 @@ class Unpack extends Parser { this.emit('prefinish') this.emit('finish') this.emit('end') - this.emit('close') } } [CHECKPATH] (entry) { if (this.strip) { const parts = normPath(entry.path).split('/') - if (parts.length < this.strip) + if (parts.length < this.strip) { return false + } entry.path = parts.slice(this.strip).join('/') if (entry.type === 'Link') { const linkparts = normPath(entry.linkpath).split('/') - if (linkparts.length >= this.strip) + if (linkparts.length >= this.strip) { entry.linkpath = linkparts.slice(this.strip).join('/') - else + } else { return false + } } } @@ -267,10 +277,11 @@ class Unpack extends Parser { } } - if (path.isAbsolute(entry.path)) + if (path.isAbsolute(entry.path)) { entry.absolute = normPath(path.resolve(entry.path)) - else + } else { entry.absolute = normPath(path.resolve(this.cwd, entry.path)) + } // if we somehow ended up with a path that escapes the cwd, and we are // not in preservePaths mode, then something is fishy! This should have @@ -292,32 +303,36 @@ class Unpack extends Parser { // may not replace the cwd with a different kind of thing entirely. if (entry.absolute === this.cwd && entry.type !== 'Directory' && - entry.type !== 'GNUDumpDir') + entry.type !== 'GNUDumpDir') { return false + } // only encode : chars that aren't drive letter indicators if (this.win32) { const { root: aRoot } = path.win32.parse(entry.absolute) - entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length)) + entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length)) const { root: pRoot } = path.win32.parse(entry.path) - entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length)) + entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)) } return true } [ONENTRY] (entry) { - if (!this[CHECKPATH](entry)) + if (!this[CHECKPATH](entry)) { return entry.resume() + } assert.equal(typeof entry.absolute, 'string') switch (entry.type) { case 'Directory': case 'GNUDumpDir': - if (entry.mode) + if (entry.mode) { entry.mode = entry.mode | 0o700 + } + // eslint-disable-next-line no-fallthrough case 'File': case 'OldFile': case 'ContiguousFile': @@ -337,10 +352,10 @@ class Unpack extends Parser { // Cwd has to exist, or else nothing works. That's serious. // Other errors are warnings, which raise the error in strict // mode, but otherwise continue on. - if (er.name === 'CwdError') + if (er.name === 'CwdError') { this.emit('error', er) - else { - this.warn('TAR_ENTRY_ERROR', er, {entry}) + } else { + this.warn('TAR_ENTRY_ERROR', er, { entry }) this[UNPEND]() entry.resume() } @@ -390,8 +405,9 @@ class Unpack extends Parser { autoClose: false, }) stream.on('error', er => { - if (stream.fd) + if (stream.fd) { fs.close(stream.fd, () => {}) + } // flush all the data out so that we aren't left hanging // if the error wasn't actually fatal. 
otherwise the parse @@ -405,8 +421,9 @@ class Unpack extends Parser { const done = er => { if (er) { /* istanbul ignore else - we should always have a fd by now */ - if (stream.fd) + if (stream.fd) { fs.close(stream.fd, () => {}) + } this[ONERROR](er, entry) fullyDone() @@ -415,10 +432,11 @@ class Unpack extends Parser { if (--actions === 0) { fs.close(stream.fd, er => { - if (er) + if (er) { this[ONERROR](er, entry) - else + } else { this[UNPEND]() + } fullyDone() }) } @@ -498,7 +516,7 @@ class Unpack extends Parser { [UNSUPPORTED] (entry) { entry.unsupported = true this.warn('TAR_ENTRY_UNSUPPORTED', - `unsupported entry type: ${entry.type}`, {entry}) + `unsupported entry type: ${entry.type}`, { entry }) entry.resume() } @@ -540,8 +558,9 @@ class Unpack extends Parser { [CHECKFS] (entry) { this[PEND]() const paths = [entry.path] - if (entry.linkpath) + if (entry.linkpath) { paths.push(entry.linkpath) + } this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) } @@ -556,10 +575,11 @@ class Unpack extends Parser { // entry, it'll just fail to unpack, but a symlink to a directory, using an // 8.3 shortname or certain unicode attacks, can evade detection and lead // to arbitrary writes to anywhere on the system. - if (entry.type === 'SymbolicLink') + if (entry.type === 'SymbolicLink') { dropCache(this.dirCache) - else if (entry.type !== 'Directory') + } else if (entry.type !== 'Directory') { pruneCache(this.dirCache, entry.absolute) + } } [CHECKFS2] (entry, fullyDone) { @@ -606,8 +626,9 @@ class Unpack extends Parser { done() return } - if (lstatEr || this[ISREUSABLE](entry, st)) + if (lstatEr || this[ISREUSABLE](entry, st)) { return this[MAKEFS](null, entry, done) + } if (st.isDirectory()) { if (entry.type === 'Directory') { @@ -615,8 +636,9 @@ class Unpack extends Parser { entry.mode && (st.mode & 0o7777) !== entry.mode const afterChmod = er => this[MAKEFS](er, entry, done) - if (!needChmod) + if (!needChmod) { return afterChmod() + } return fs.chmod(entry.absolute, entry.mode, afterChmod) } // Not a dir entry, have to remove it. 
@@ -634,18 +656,20 @@ class Unpack extends Parser { // not a dir, and not reusable // don't remove if the cwd, we want that error - if (entry.absolute === this.cwd) + if (entry.absolute === this.cwd) { return this[MAKEFS](null, entry, done) + } unlinkFile(entry.absolute, er => this[MAKEFS](er, entry, done)) }) } - if (this[CHECKED_CWD]) + if (this[CHECKED_CWD]) { start() - else + } else { checkCwd() + } } [MAKEFS] (er, entry, done) { @@ -676,9 +700,9 @@ class Unpack extends Parser { [LINK] (entry, linkpath, link, done) { // XXX: get the type ('symlink' or 'junction') for windows fs[link](linkpath, entry.absolute, er => { - if (er) + if (er) { this[ONERROR](er, entry) - else { + } else { this[UNPEND]() entry.resume() } @@ -704,8 +728,9 @@ class UnpackSync extends Unpack { if (!this[CHECKED_CWD]) { const er = this[MKDIR](this.cwd, this.dmode) - if (er) + if (er) { return this[ONERROR](er, entry) + } this[CHECKED_CWD] = true } @@ -715,17 +740,20 @@ class UnpackSync extends Unpack { const parent = normPath(path.dirname(entry.absolute)) if (parent !== this.cwd) { const mkParent = this[MKDIR](parent, this.dmode) - if (mkParent) + if (mkParent) { return this[ONERROR](mkParent, entry) + } } } const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) - if (st && (this.keep || this.newer && st.mtime > entry.mtime)) + if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { return this[SKIP](entry) + } - if (lstatEr || this[ISREUSABLE](entry, st)) + if (lstatEr || this[ISREUSABLE](entry, st)) { return this[MAKEFS](null, entry) + } if (st.isDirectory()) { if (entry.type === 'Directory') { @@ -759,8 +787,9 @@ class UnpackSync extends Unpack { } catch (e) { closeError = e } - if (er || closeError) + if (er || closeError) { this[ONERROR](er || closeError, entry) + } done() } diff --git a/node_modules/tar/lib/update.js b/node_modules/tar/lib/update.js index a5784b73f3c75..ded977dc1dec0 100644 --- a/node_modules/tar/lib/update.js +++ b/node_modules/tar/lib/update.js @@ -9,14 +9,17 @@ const r = require('./replace.js') module.exports = (opt_, files, cb) => { const opt = hlo(opt_) - if (!opt.file) + if (!opt.file) { throw new TypeError('file is required') + } - if (opt.gzip) + if (opt.gzip) { throw new TypeError('cannot append to compressed archives') + } - if (!files || !Array.isArray(files) || !files.length) + if (!files || !Array.isArray(files) || !files.length) { throw new TypeError('no files or directories specified') + } files = Array.from(files) @@ -27,8 +30,9 @@ module.exports = (opt_, files, cb) => { const mtimeFilter = opt => { const filter = opt.filter - if (!opt.mtimeCache) + if (!opt.mtimeCache) { opt.mtimeCache = new Map() + } opt.filter = filter ? 
(path, stat) => filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime) diff --git a/node_modules/tar/lib/warn-mixin.js b/node_modules/tar/lib/warn-mixin.js index aeebb531b5701..a940639636133 100644 --- a/node_modules/tar/lib/warn-mixin.js +++ b/node_modules/tar/lib/warn-mixin.js @@ -1,10 +1,12 @@ 'use strict' module.exports = Base => class extends Base { warn (code, message, data = {}) { - if (this.file) + if (this.file) { data.file = this.file - if (this.cwd) + } + if (this.cwd) { data.cwd = this.cwd + } data.code = message instanceof Error && message.code || code data.tarCode = code if (!this.strict && data.recoverable !== false) { @@ -13,9 +15,10 @@ module.exports = Base => class extends Base { message = message.message } this.emit('warn', data.tarCode, message, data) - } else if (message instanceof Error) + } else if (message instanceof Error) { this.emit('error', Object.assign(message, data)) - else + } else { this.emit('error', Object.assign(new Error(`${code}: ${message}`), data)) + } } } diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js index 3702f2ae51979..3b5540f76c020 100644 --- a/node_modules/tar/lib/write-entry.js +++ b/node_modules/tar/lib/write-entry.js @@ -8,8 +8,9 @@ const normPath = require('./normalize-windows-path.js') const stripSlash = require('./strip-trailing-slashes.js') const prefixPath = (path, prefix) => { - if (!prefix) + if (!prefix) { return normPath(path) + } path = normPath(path).replace(/^\.(\/|$)/, '') return stripSlash(prefix) + '/' + path } @@ -44,8 +45,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { constructor (p, opt) { opt = opt || {} super(opt) - if (typeof p !== 'string') + if (typeof p !== 'string') { throw new TypeError('path is required') + } this.path = normPath(p) // suppress atime, ctime, uid, gid, uname, gname this.portable = !!opt.portable @@ -72,8 +74,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { this.pos = null this.remain = null - if (typeof opt.onwarn === 'function') + if (typeof opt.onwarn === 'function') { this.on('warn', opt.onwarn) + } let pathWarn = false if (!this.preservePaths) { @@ -94,8 +97,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p)) - if (this.path === '') + if (this.path === '') { this.path = './' + } if (pathWarn) { this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { @@ -104,22 +108,25 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { }) } - if (this.statCache.has(this.absolute)) + if (this.statCache.has(this.absolute)) { this[ONLSTAT](this.statCache.get(this.absolute)) - else + } else { this[LSTAT]() + } } emit (ev, ...data) { - if (ev === 'error') + if (ev === 'error') { this[HAD_ERROR] = true + } return super.emit(ev, ...data) } [LSTAT] () { fs.lstat(this.absolute, (er, stat) => { - if (er) + if (er) { return this.emit('error', er) + } this[ONLSTAT](stat) }) } @@ -127,8 +134,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { [ONLSTAT] (stat) { this.statCache.set(this.absolute, stat) this.stat = stat - if (!stat.isFile()) + if (!stat.isFile()) { stat.size = 0 + } this.type = getType(stat) this.emit('stat', stat) this[PROCESS]() @@ -153,8 +161,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass { } [HEADER] () { - if (this.type === 'Directory' && this.portable) + if (this.type === 'Directory' && this.portable) { this.noMtime = true + } this.header = new Header({ path: 
diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js
index 3702f2ae51979..3b5540f76c020 100644
--- a/node_modules/tar/lib/write-entry.js
+++ b/node_modules/tar/lib/write-entry.js
@@ -8,8 +8,9 @@ const normPath = require('./normalize-windows-path.js')
 const stripSlash = require('./strip-trailing-slashes.js')
 
 const prefixPath = (path, prefix) => {
-  if (!prefix)
+  if (!prefix) {
     return normPath(path)
+  }
   path = normPath(path).replace(/^\.(\/|$)/, '')
   return stripSlash(prefix) + '/' + path
 }
@@ -44,8 +45,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
   constructor (p, opt) {
     opt = opt || {}
     super(opt)
-    if (typeof p !== 'string')
+    if (typeof p !== 'string') {
       throw new TypeError('path is required')
+    }
     this.path = normPath(p)
     // suppress atime, ctime, uid, gid, uname, gname
     this.portable = !!opt.portable
@@ -72,8 +74,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
     this.pos = null
     this.remain = null
 
-    if (typeof opt.onwarn === 'function')
+    if (typeof opt.onwarn === 'function') {
       this.on('warn', opt.onwarn)
+    }
 
     let pathWarn = false
     if (!this.preservePaths) {
@@ -94,8 +97,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
 
     this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p))
 
-    if (this.path === '')
+    if (this.path === '') {
       this.path = './'
+    }
 
     if (pathWarn) {
       this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
@@ -104,22 +108,25 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
       })
     }
 
-    if (this.statCache.has(this.absolute))
+    if (this.statCache.has(this.absolute)) {
       this[ONLSTAT](this.statCache.get(this.absolute))
-    else
+    } else {
       this[LSTAT]()
+    }
   }
 
   emit (ev, ...data) {
-    if (ev === 'error')
+    if (ev === 'error') {
       this[HAD_ERROR] = true
+    }
     return super.emit(ev, ...data)
   }
 
   [LSTAT] () {
     fs.lstat(this.absolute, (er, stat) => {
-      if (er)
+      if (er) {
         return this.emit('error', er)
+      }
       this[ONLSTAT](stat)
     })
   }
@@ -127,8 +134,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
   [ONLSTAT] (stat) {
     this.statCache.set(this.absolute, stat)
     this.stat = stat
-    if (!stat.isFile())
+    if (!stat.isFile()) {
       stat.size = 0
+    }
     this.type = getType(stat)
     this.emit('stat', stat)
     this[PROCESS]()
@@ -153,8 +161,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
   }
 
   [HEADER] () {
-    if (this.type === 'Directory' && this.portable)
+    if (this.type === 'Directory' && this.portable) {
       this.noMtime = true
+    }
 
     this.header = new Header({
       path: this[PREFIX](this.path),
@@ -196,8 +205,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
   }
 
   [DIRECTORY] () {
-    if (this.path.substr(-1) !== '/')
+    if (this.path.slice(-1) !== '/') {
       this.path += '/'
+    }
     this.stat.size = 0
     this[HEADER]()
     this.end()
@@ -205,8 +215,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
 
   [SYMLINK] () {
     fs.readlink(this.absolute, (er, linkpath) => {
-      if (er)
+      if (er) {
         return this.emit('error', er)
+      }
       this[ONREADLINK](linkpath)
     })
   }
@@ -230,31 +241,35 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
       const linkKey = this.stat.dev + ':' + this.stat.ino
       if (this.linkCache.has(linkKey)) {
         const linkpath = this.linkCache.get(linkKey)
-        if (linkpath.indexOf(this.cwd) === 0)
+        if (linkpath.indexOf(this.cwd) === 0) {
           return this[HARDLINK](linkpath)
+        }
       }
       this.linkCache.set(linkKey, this.absolute)
     }
 
     this[HEADER]()
-    if (this.stat.size === 0)
+    if (this.stat.size === 0) {
       return this.end()
+    }
 
     this[OPENFILE]()
   }
 
   [OPENFILE] () {
     fs.open(this.absolute, 'r', (er, fd) => {
-      if (er)
+      if (er) {
         return this.emit('error', er)
+      }
       this[ONOPENFILE](fd)
     })
   }
 
   [ONOPENFILE] (fd) {
     this.fd = fd
-    if (this[HAD_ERROR])
+    if (this[HAD_ERROR]) {
       return this[CLOSE]()
+    }
 
     this.blockLen = 512 * Math.ceil(this.stat.size / 512)
     this.blockRemain = this.blockLen
@@ -318,10 +333,11 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
       this.buf : this.buf.slice(this.offset, this.offset + bytesRead)
 
     const flushed = this.write(writeBuf)
-    if (!flushed)
+    if (!flushed) {
       this[AWAITDRAIN](() => this[ONDRAIN]())
-    else
+    } else {
       this[ONDRAIN]()
+    }
   }
 
   [AWAITDRAIN] (cb) {
@@ -343,8 +359,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
 
   [ONDRAIN] () {
     if (!this.remain) {
-      if (this.blockRemain)
+      if (this.blockRemain) {
         super.write(Buffer.alloc(this.blockRemain))
+      }
       return this[CLOSE](er => er ? this.emit('error', er) : this.end())
     }
 
@@ -412,8 +429,9 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
     this.readEntry = readEntry
     this.type = readEntry.type
-    if (this.type === 'Directory' && this.portable)
+    if (this.type === 'Directory' && this.portable) {
       this.noMtime = true
+    }
 
     this.prefix = opt.prefix || null
 
@@ -429,8 +447,9 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
     this.ctime = this.portable ? null : readEntry.ctime
     this.linkpath = normPath(readEntry.linkpath)
 
-    if (typeof opt.onwarn === 'function')
+    if (typeof opt.onwarn === 'function') {
       this.on('warn', opt.onwarn)
+    }
 
     let pathWarn = false
     if (!this.preservePaths) {
@@ -500,15 +519,17 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
 
   write (data) {
     const writeLen = data.length
-    if (writeLen > this.blockRemain)
+    if (writeLen > this.blockRemain) {
       throw new Error('writing more to entry than is appropriate')
+    }
     this.blockRemain -= writeLen
     return super.write(data)
   }
 
   end () {
-    if (this.blockRemain)
+    if (this.blockRemain) {
       super.write(Buffer.alloc(this.blockRemain))
+    }
     return super.end()
   }
 })
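Most of the write-entry.js churn is the same brace normalization, but it touches the two options worth knowing when creating archives: portable (which, as the [HEADER] hunk shows, also implies noMtime for directories) and prefix, consumed by the prefixPath helper at the top of the file. A hedged sketch with placeholder paths:

// Sketch only: WriteEntry options exercised by the hunks above.
// portable suppresses atime/ctime/uid/gid and directory mtimes;
// prefix is prepended to every entry path via prefixPath().
const tar = require('tar')

tar.create(
  { file: 'out.tar', portable: true, prefix: 'package' },
  ['index.js']
).then(() => console.log('wrote out.tar'))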
diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json
index 9f9977a0ca99b..3a02105c159e4 100644
--- a/node_modules/tar/package.json
+++ b/node_modules/tar/package.json
@@ -1,25 +1,21 @@
 {
-  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+  "author": "GitHub Inc.",
   "name": "tar",
   "description": "tar for node",
-  "version": "6.1.11",
+  "version": "6.1.12",
   "repository": {
     "type": "git",
     "url": "https://github.com/npm/node-tar.git"
   },
   "scripts": {
-    "test:posix": "tap",
-    "test:win32": "tap --lines=98 --branches=98 --statements=98 --functions=98",
-    "test": "node test/fixtures/test.js",
-    "posttest": "npm run lint",
-    "eslint": "eslint",
-    "lint": "npm run eslint -- test lib",
-    "lintfix": "npm run lint -- --fix",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
     "genparse": "node scripts/generate-parse-fixtures.js",
-    "bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done"
+    "template-oss-apply": "template-oss-apply --force",
+    "lint": "eslint \"**/*.js\"",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "test": "tap",
+    "posttest": "npm run lint"
   },
   "dependencies": {
     "chownr": "^2.0.0",
@@ -30,30 +26,50 @@
     "yallist": "^4.0.0"
   },
   "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.8.0",
     "chmodr": "^1.2.0",
     "end-of-stream": "^1.4.3",
-    "eslint": "^7.17.0",
-    "eslint-plugin-import": "^2.22.1",
-    "eslint-plugin-node": "^11.1.0",
-    "eslint-plugin-promise": "^4.2.1",
-    "eslint-plugin-standard": "^5.0.0",
     "events-to-array": "^1.1.2",
     "mutate-fs": "^2.1.1",
-    "rimraf": "^2.7.1",
-    "tap": "^15.0.9",
-    "tar-fs": "^1.16.3",
-    "tar-stream": "^1.6.2"
+    "nock": "^13.2.9",
+    "rimraf": "^3.0.2",
+    "tap": "^16.0.1"
   },
   "license": "ISC",
   "engines": {
-    "node": ">= 10"
+    "node": ">=10"
   },
   "files": [
-    "index.js",
-    "lib/*.js"
+    "bin/",
+    "lib/",
+    "index.js"
   ],
   "tap": {
     "coverage-map": "map.js",
-    "check-coverage": true
+    "timeout": 0,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.8.0",
+    "content": "scripts/template-oss",
+    "engines": ">=10",
+    "distPaths": [
+      "index.js"
+    ],
+    "allowPaths": [
+      "/index.js"
+    ],
+    "ciVersions": [
+      "10.x",
+      "12.x",
+      "14.x",
+      "16.x",
+      "18.x"
+    ]
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 1ee8562af34f4..c61a7e1fc383e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -147,7 +147,7 @@
         "rimraf": "^3.0.2",
         "semver": "^7.3.8",
         "ssri": "^10.0.0",
-        "tar": "^6.1.11",
+        "tar": "^6.1.12",
         "text-table": "~0.2.0",
         "tiny-relative-date": "^1.3.0",
         "treeverse": "^3.0.0",
@@ -12938,9 +12938,10 @@
       }
     },
     "node_modules/tar": {
-      "version": "6.1.11",
+      "version": "6.1.12",
+      "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.12.tgz",
+      "integrity": "sha512-jU4TdemS31uABHd+Lt5WEYJuzn+TJTCBLljvIAHZOz6M9Os5pJ4dD+vRFLxPa/n3T0iEFzpi+0x1UfuDZYbRMw==",
       "inBundle": true,
-      "license": "ISC",
       "dependencies": {
         "chownr": "^2.0.0",
         "fs-minipass": "^2.0.0",
@@ -12950,7 +12951,7 @@
         "yallist": "^4.0.0"
       },
       "engines": {
-        "node": ">= 10"
+        "node": ">=10"
       }
     },
     "node_modules/tar-fs": {
diff --git a/package.json b/package.json
index 509aba5044d9c..278128cebfee0 100644
--- a/package.json
+++ b/package.json
@@ -117,7 +117,7 @@
     "rimraf": "^3.0.2",
     "semver": "^7.3.8",
     "ssri": "^10.0.0",
-    "tar": "^6.1.11",
+    "tar": "^6.1.12",
     "text-table": "~0.2.0",
     "tiny-relative-date": "^1.3.0",
     "treeverse": "^3.0.0",
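The remaining hunks are the mechanical version bump in the npm CLI's own manifests. A quick round-trip with the updated vendored copy is enough to sanity-check the bump; a hedged sketch, with a placeholder output file:

// Smoke-test sketch, not part of the patch: create and list an archive
// with tar 6.1.12 to confirm the bundled update behaves as before.
const tar = require('tar')

tar.c({ file: 'smoke.tar' }, ['package.json'])
  .then(() => tar.t({ file: 'smoke.tar', onentry: e => console.log(e.path) }))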