diff --git a/node_modules/cacache/lib/content/read.js b/node_modules/cacache/lib/content/read.js index 0aeb454725bd0..f5128fe185d0f 100644 --- a/node_modules/cacache/lib/content/read.js +++ b/node_modules/cacache/lib/content/read.js @@ -1,42 +1,35 @@ 'use strict' -const util = require('util') - -const fs = require('fs') +const fs = require('@npmcli/fs') const fsm = require('fs-minipass') const ssri = require('ssri') const contentPath = require('./path') const Pipeline = require('minipass-pipeline') -const lstat = util.promisify(fs.lstat) -const readFile = util.promisify(fs.readFile) -const copyFile = util.promisify(fs.copyFile) - module.exports = read const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -function read (cache, integrity, opts = {}) { +async function read (cache, integrity, opts = {}) { const { size } = opts - return withContentSri(cache, integrity, (cpath, sri) => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { // get size - return lstat(cpath).then(stat => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } + const stat = await fs.lstat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } - return readFile(cpath, null).then((data) => { - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } - return data - }) - }) + return data } const readPipeline = (cpath, size, sri, stream) => { @@ -58,7 +51,7 @@ module.exports.sync = readSync function readSync (cache, integrity, opts = {}) { const { size } = opts return withContentSriSync(cache, integrity, (cpath, sri) => { - const data = fs.readFileSync(cpath) + const data = fs.readFileSync(cpath, { encoding: null }) if (typeof size === 'number' && size !== data.length) { throw sizeError(size, data.length) } @@ -77,16 +70,19 @@ module.exports.readStream = readStream function readStream (cache, integrity, opts = {}) { const { size } = opts const stream = new Pipeline() - withContentSri(cache, integrity, (cpath, sri) => { - // just lstat to ensure it exists - return lstat(cpath).then((stat) => ({ stat, cpath, sri })) - }).then(({ stat, cpath, sri }) => { + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just lstat to ensure it exists + const stat = await fs.lstat(cpath) + return { stat, cpath, sri } + }) if (typeof size === 'number' && size !== stat.size) { return stream.emit('error', sizeError(size, stat.size)) } readPipeline(cpath, stat.size, sri, stream) - }, er => stream.emit('error', er)) + }).catch(err => stream.emit('error', err)) return stream } @@ -96,7 +92,7 @@ module.exports.copy.sync = copySync function copy (cache, integrity, dest) { return withContentSri(cache, integrity, (cpath, sri) => { - return copyFile(cpath, dest) + return fs.copyFile(cpath, dest) }) } @@ -108,14 +104,17 @@ function copySync (cache, integrity, dest) { module.exports.hasContent = 
hasContent -function hasContent (cache, integrity) { +async function hasContent (cache, integrity) { if (!integrity) { - return Promise.resolve(false) + return false } - return withContentSri(cache, integrity, (cpath, sri) => { - return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) - }).catch((err) => { + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.lstat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { if (err.code === 'ENOENT') { return false } @@ -128,7 +127,7 @@ function hasContent (cache, integrity) { return false } } - }) + } } module.exports.hasContent.sync = hasContentSync @@ -159,61 +158,47 @@ function hasContentSync (cache, integrity) { }) } -function withContentSri (cache, integrity, fn) { - const tryFn = () => { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - return Promise - .all(digests.map((meta) => { - return withContentSri(cache, meta, fn) - .catch((err) => { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - }) - })) - .then((results) => { - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - }) +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result } - } - return new Promise((resolve, reject) => { - try { - tryFn() - .then(resolve) - .catch(reject) - } catch (err) { - reject(err) + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError } - }) + + // Throw generic error + throw results.find((r) => r instanceof Error) + } } function withContentSriSync (cache, integrity, fn) { diff --git a/node_modules/cacache/lib/content/rm.js b/node_modules/cacache/lib/content/rm.js index 50612364e9b48..f7333053b393f 100644 --- a/node_modules/cacache/lib/content/rm.js +++ b/node_modules/cacache/lib/content/rm.js @@ -8,13 +8,13 @@ const rimraf = util.promisify(require('rimraf')) module.exports = rm -function rm (cache, integrity) { - return hasContent(cache, integrity).then((content) => { - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - return rimraf(contentPath(cache, content.sri)).then(() => true) - } else { - return false - } - }) +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await rimraf(contentPath(cache, content.sri)) + return true + } else { + return false + } } diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js index b0aa18c12ba78..62388dc81d0fd 100644 --- a/node_modules/cacache/lib/content/write.js +++ b/node_modules/cacache/lib/content/write.js @@ -1,10 +1,11 @@ 'use strict' +const events = require('events') const util = require('util') const contentPath = require('./path') const fixOwner = require('../util/fix-owner') -const fs = require('fs') +const fs = require('@npmcli/fs') const moveFile = require('../util/move-file') const Minipass = require('minipass') const Pipeline = require('minipass-pipeline') @@ -15,8 +16,6 @@ const ssri = require('ssri') const uniqueFilename = require('unique-filename') const fsm = require('fs-minipass') -const writeFile = util.promisify(fs.writeFile) - module.exports = write async function write (cache, data, opts = {}) { @@ -36,7 +35,7 @@ async function write (cache, data, opts = {}) { const tmp = await makeTmp(cache, opts) try { - await writeFile(tmp.target, data, { flag: 'wx' }) + await fs.writeFile(tmp.target, data, { flag: 'wx' }) await moveToDestination(tmp, cache, sri, opts) return { integrity: sri, size: data.length } } finally { @@ -115,7 +114,21 @@ async function handleContent (inputStream, cache, opts) { } } -function pipeToTmp (inputStream, cache, tmpTarget, opts) { +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + 
if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + let integrity let size const hashStream = ssri.integrityStream({ @@ -130,43 +143,28 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts) { size = s }) - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - // NB: this can throw if the hashStream has a problem with - // it, and the data is fully written. but pipeToTmp is only - // called in promisory contexts where that is handled. - const pipeline = new Pipeline( - inputStream, - hashStream, - outStream - ) - - return pipeline.promise().then(() => ({ integrity, size })) + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } } -function makeTmp (cache, opts) { +async function makeTmp (cache, opts) { const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ + await fixOwner.mkdirfix(cache, path.dirname(tmpTarget)) + return { target: tmpTarget, moved: false, - })) + } } -function moveToDestination (tmp, cache, sri, opts) { +async function moveToDestination (tmp, cache, sri, opts) { const destination = contentPath(cache, sri) const destDir = path.dirname(destination) - return fixOwner - .mkdirfix(cache, destDir) - .then(() => { - return moveFile(tmp.target, destination) - }) - .then(() => { - tmp.moved = true - return fixOwner.chownr(cache, destination) - }) + await fixOwner.mkdirfix(cache, destDir) + await moveFile(tmp.target, destination) + tmp.moved = true + await fixOwner.chownr(cache, destination) } function sizeError (expected, found) { diff --git a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js index 9d4485624acbc..cbfa619099f90 100644 --- a/node_modules/cacache/lib/entry-index.js +++ b/node_modules/cacache/lib/entry-index.js @@ -2,7 +2,7 @@ const util = require('util') const crypto = require('crypto') -const fs = require('fs') +const fs = require('@npmcli/fs') const Minipass = require('minipass') const path = require('path') const ssri = require('ssri') @@ -17,11 +17,6 @@ const _rimraf = require('rimraf') const rimraf = util.promisify(_rimraf) rimraf.sync = _rimraf.sync -const appendFile = util.promisify(fs.appendFile) -const readFile = util.promisify(fs.readFile) -const readdir = util.promisify(fs.readdir) -const writeFile = util.promisify(fs.writeFile) - module.exports.NotFoundError = class NotFoundError extends Error { constructor (cache, key) { super(`No cache entry for ${key} found in ${cache}`) @@ -85,7 +80,7 @@ async function compact (cache, key, matchFn, opts = {}) { } const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fs.writeFile(tmp.target, newIndex, { flag: 'wx' }) await fixOwner.mkdirfix(cache, path.dirname(bucket)) // we use @npmcli/move-file directly here because we // want to overwrite the existing file @@ -118,7 +113,7 @@ async function compact (cache, key, matchFn, opts = {}) { module.exports.insert = insert -function insert (cache, key, integrity, opts = {}) { +async function insert (cache, key, integrity, opts = {}) { const { metadata, size } = opts const bucket = 
bucketPath(cache, key) const entry = { @@ -128,36 +123,32 @@ function insert (cache, key, integrity, opts = {}) { size, metadata, } - return fixOwner - .mkdirfix(cache, path.dirname(bucket)) - .then(() => { - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - }) - .then(() => fixOwner.chownr(cache, bucket)) - .catch((err) => { - if (err.code === 'ENOENT') { - return undefined - } + try { + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } - throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. - // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. - }) - .then(() => { - return formatEntry(cache, entry) - }) + throw err + // There's a class of race conditions that happen when things get deleted + // during fixOwner, or between the two mkdirfix/chownr calls. + // + // It's perfectly fine to just not bother in those cases and lie + // that the index entry was written. Because it's a cache. 
+ } + return formatEntry(cache, entry) } module.exports.insert.sync = insertSync @@ -187,25 +178,24 @@ function insertSync (cache, key, integrity, opts = {}) { module.exports.find = find -function find (cache, key) { +async function find (cache, key) { const bucket = bucketPath(cache, key) - return bucketEntries(bucket) - .then((entries) => { - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return null + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) } else { - throw err + return latest } - }) + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } } module.exports.find.sync = findSync @@ -257,67 +247,64 @@ function lsStream (cache) { const indexDir = bucketDir(cache) const stream = new Minipass({ objectMode: true }) - readdirOrEmpty(indexDir).then(buckets => Promise.all( - buckets.map(bucket => { + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { const bucketPath = path.join(indexDir, bucket) - return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( - subbuckets.map(subbucket => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - return readdirOrEmpty(subbucketPath).then(entries => Promise.all( - entries.map(entry => { - const entryPath = path.join(subbucketPath, entry) - return bucketEntries(entryPath).then(entries => - // using a Map here prevents duplicate keys from - // showing up twice, I guess? - entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - ).then(reduced => { - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - }).catch(err => { - if (err.code === 'ENOENT') { - return undefined - } - throw err - }) - }) - )) - }) - )) - }) - )) - .then( - () => stream.end(), - err => stream.emit('error', err) - ) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? 
+ const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + }).catch(err => stream.emit('error', err)) return stream } module.exports.ls = ls -function ls (cache) { - return lsStream(cache).collect().then(entries => - entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) - ) +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) } module.exports.bucketEntries = bucketEntries -function bucketEntries (bucket, filter) { - return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) +async function bucketEntries (bucket, filter) { + const data = await fs.readFile(bucket, 'utf8') + return _bucketEntries(data, filter) } module.exports.bucketEntries.sync = bucketEntriesSync @@ -406,7 +393,7 @@ function formatEntry (cache, entry, keepAll) { } function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { + return fs.readdir(dir).catch((err) => { if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { return [] } diff --git a/node_modules/cacache/lib/get.js b/node_modules/cacache/lib/get.js index 58f357b1da3d1..cc9d8f6796647 100644 --- a/node_modules/cacache/lib/get.js +++ b/node_modules/cacache/lib/get.js @@ -8,52 +8,48 @@ const index = require('./entry-index') const memo = require('./memoization') const read = require('./content/read') -function getData (cache, key, opts = {}) { +async function getData (cache, key, opts = {}) { const { integrity, memoize, size } = opts const memoized = memo.get(cache, key, opts) if (memoized && memoize !== false) { - return Promise.resolve({ + return { metadata: memoized.entry.metadata, data: memoized.data, integrity: memoized.entry.integrity, size: memoized.entry.size, - }) + } } - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } - return read(cache, entry.integrity, { integrity, size }).then((data) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } } module.exports = getData -function getDataByDigest (cache, key, opts = {}) { +async function getDataByDigest (cache, key, opts = {}) { const { integrity, memoize, size } = opts const memoized = memo.get.byDigest(cache, key, opts) if (memoized && memoize !== false) { - return Promise.resolve(memoized) + return memoized } - return read(cache, key, { integrity, size }).then((res) => { - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res - }) + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res } module.exports.byDigest = getDataByDigest @@ -131,36 +127,35 @@ function getStream (cache, key, opts = {}) 
{ } const stream = new Pipeline() - index - .find(cache, key) - .then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) }) - .catch((err) => stream.emit('error', err)) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + }).catch((err) => stream.emit('error', err)) return stream } @@ -204,27 +199,26 @@ function info (cache, key, opts = {}) { } module.exports.info = info -function copy (cache, key, dest, opts = {}) { - return index.find(cache, key, opts).then((entry) => { - if (!entry) { - throw new index.NotFoundError(cache, key) - } - return read.copy(cache, entry.integrity, dest, opts) - .then(() => { - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } - }) - }) +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } } module.exports.copy = copy -function copyByDigest (cache, key, dest, opts = {}) { - return read.copy(cache, key, dest, opts).then(() => key) +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key } + module.exports.copy.byDigest = copyByDigest module.exports.hasContent = read.hasContent diff --git a/node_modules/cacache/lib/put.js b/node_modules/cacache/lib/put.js index eed51874f9f64..9fc932d5f6dec 100644 --- a/node_modules/cacache/lib/put.js +++ b/node_modules/cacache/lib/put.js @@ -14,20 +14,16 @@ const putOpts = (opts) => ({ module.exports = putData -function putData (cache, key, data, opts = {}) { +async function putData (cache, key, data, opts = {}) { const { memoize } = opts opts = putOpts(opts) - return write(cache, data, opts).then((res) => { - return index - .insert(cache, key, res.integrity, { ...opts, size: res.size }) - .then((entry) => { - if (memoize) { - memo.put(cache, entry, data, opts) - } + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { 
...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } - return res.integrity - }) - }) + return res.integrity } module.exports.stream = putStream @@ -68,17 +64,14 @@ function putStream (cache, key, opts = {}) { // last but not least, we write the index and emit hash and size, // and memoize if we're doing that pipeline.push(new Flush({ - flush () { + async flush () { if (!error) { - return index - .insert(cache, key, integrity, { ...opts, size }) - .then((entry) => { - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - }) + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) } }, })) diff --git a/node_modules/cacache/lib/util/fix-owner.js b/node_modules/cacache/lib/util/fix-owner.js index bc14def4e405c..182fcb028f06c 100644 --- a/node_modules/cacache/lib/util/fix-owner.js +++ b/node_modules/cacache/lib/util/fix-owner.js @@ -33,40 +33,38 @@ const getSelf = () => { module.exports.chownr = fixOwner -function fixOwner (cache, filepath) { +async function fixOwner (cache, filepath) { if (!process.getuid) { // This platform doesn't need ownership fixing - return Promise.resolve() + return } getSelf() if (self.uid !== 0) { // almost certainly can't chown anyway - return Promise.resolve() + return } - return Promise.resolve(inferOwner(cache)).then((owner) => { - const { uid, gid } = owner + const { uid, gid } = await inferOwner(cache) - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } + // No need to override if it's already what we used. + if (self.uid === uid && self.gid === gid) { + return + } - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) - }) + return inflight('fixOwner: fixing ownership on ' + filepath, () => + chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ).catch((err) => { + if (err.code === 'ENOENT') { + return null + } + + throw err + }) + ) } module.exports.chownr.sync = fixOwnerSync @@ -105,26 +103,25 @@ function fixOwnerSync (cache, filepath) { module.exports.mkdirfix = mkdirfix -function mkdirfix (cache, p, cb) { +async function mkdirfix (cache, p, cb) { // we have to infer the owner _before_ making the directory, even though // we aren't going to use the results, since the cache itself might not // exist yet. If we mkdirp it, then our current uid/gid will be assumed // to be correct if it creates the cache folder in the process. 
- return Promise.resolve(inferOwner(cache)).then(() => { - return mkdirp(p) - .then((made) => { - if (made) { - return fixOwner(cache, made).then(() => made) - } - }) - .catch((err) => { - if (err.code === 'EEXIST') { - return fixOwner(cache, p).then(() => null) - } - - throw err - }) - }) + await inferOwner(cache) + try { + const made = await mkdirp(p) + if (made) { + await fixOwner(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + await fixOwner(cache, p) + return null + } + throw err + } } module.exports.mkdirfix.sync = mkdirfixSync diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js index 3739cea3df281..a0b40413cb56e 100644 --- a/node_modules/cacache/lib/util/move-file.js +++ b/node_modules/cacache/lib/util/move-file.js @@ -1,18 +1,13 @@ 'use strict' -const fs = require('fs') -const util = require('util') -const chmod = util.promisify(fs.chmod) -const unlink = util.promisify(fs.unlink) -const stat = util.promisify(fs.stat) +const fs = require('@npmcli/fs') const move = require('@npmcli/move-file') const pinflight = require('promise-inflight') module.exports = moveFile -function moveFile (src, dest) { - const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || - process.platform === 'win32' +async function moveFile (src, dest) { + const isWindows = process.platform === 'win32' // This isn't quite an fs.rename -- the assumption is that // if `dest` already exists, and we get certain errors while @@ -23,47 +18,39 @@ function moveFile (src, dest) { // content their own way. // // Note that, as the name suggests, this strictly only supports file moves. - return new Promise((resolve, reject) => { - fs.link(src, dest, (err) => { - if (err) { - if (isWindows && err.code === 'EPERM') { - // XXX This is a really weird way to handle this situation, as it - // results in the src file being deleted even though the dest - // might not exist. Since we pretty much always write files to - // deterministic locations based on content hash, this is likely - // ok (or at worst, just ends in a future cache miss). But it would - // be worth investigating at some time in the future if this is - // really what we want to do here. - return resolve() - } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { - // file already exists, so whatever - return resolve() - } else { - return reject(err) + try { + await fs.link(src, dest) + } catch (err) { + if (isWindows && err.code === 'EPERM') { + // XXX This is a really weird way to handle this situation, as it + // results in the src file being deleted even though the dest + // might not exist. Since we pretty much always write files to + // deterministic locations based on content hash, this is likely + // ok (or at worst, just ends in a future cache miss). But it would + // be worth investigating at some time in the future if this is + // really what we want to do here. + } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { + // file already exists, so whatever + } else { + throw err + } + } + try { + await Promise.all([ + fs.unlink(src), + !isWindows && fs.chmod(dest, '0444'), + ]) + } catch (e) { + return pinflight('cacache-move-file:' + dest, async () => { + await fs.stat(dest).catch((err) => { + if (err.code !== 'ENOENT') { + // Something else is wrong here. 
Bail bail bail + throw err } - } else { - return resolve() - } - }) - }) - .then(() => { - // content should never change for any reason, so make it read-only - return Promise.all([ - unlink(src), - !isWindows && chmod(dest, '0444'), - ]) - }) - .catch(() => { - return pinflight('cacache-move-file:' + dest, () => { - return stat(dest).catch((err) => { - if (err.code !== 'ENOENT') { - // Something else is wrong here. Bail bail bail - throw err - } - // file doesn't already exist! let's try a rename -> copy fallback - // only delete if it successfully copies - return move(src, dest) - }) }) + // file doesn't already exist! let's try a rename -> copy fallback + // only delete if it successfully copies + return move(src, dest) }) + } } diff --git a/node_modules/cacache/lib/util/tmp.js b/node_modules/cacache/lib/util/tmp.js index 0a5a50eba3061..b4437cfcbeed6 100644 --- a/node_modules/cacache/lib/util/tmp.js +++ b/node_modules/cacache/lib/util/tmp.js @@ -7,15 +7,13 @@ const path = require('path') module.exports.mkdir = mktmpdir -function mktmpdir (cache, opts = {}) { +async function mktmpdir (cache, opts = {}) { const { tmpPrefix } = opts const tmpDir = path.join(cache, 'tmp') - return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - .then(() => { - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) - }) + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) } module.exports.withTmp = withTmp diff --git a/node_modules/cacache/lib/verify.js b/node_modules/cacache/lib/verify.js index a39fb6ce1d1dc..52692a01d192f 100644 --- a/node_modules/cacache/lib/verify.js +++ b/node_modules/cacache/lib/verify.js @@ -5,7 +5,7 @@ const util = require('util') const pMap = require('p-map') const contentPath = require('./content/path') const fixOwner = require('./util/fix-owner') -const fs = require('fs') +const fs = require('@npmcli/fs') const fsm = require('fs-minipass') const glob = util.promisify(require('glob')) const index = require('./entry-index') @@ -18,11 +18,6 @@ const globify = pattern => pattern.split('\\').join('/') const hasOwnProperty = (obj, key) => Object.prototype.hasOwnProperty.call(obj, key) -const stat = util.promisify(fs.stat) -const truncate = util.promisify(fs.truncate) -const writeFile = util.promisify(fs.writeFile) -const readFile = util.promisify(fs.readFile) - const verifyOpts = (opts) => ({ concurrency: 20, log: { silly () {} }, @@ -31,7 +26,7 @@ const verifyOpts = (opts) => ({ module.exports = verify -function verify (cache, opts) { +async function verify (cache, opts) { opts = verifyOpts(opts) opts.log.silly('verify', 'verifying cache at', cache) @@ -45,56 +40,47 @@ function verify (cache, opts) { markEndTime, ] - return steps - .reduce((promise, step, i) => { - const label = step.name - const start = new Date() - return promise.then((stats) => { - return step(cache, opts).then((s) => { - s && - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - - stats.runTime[label] = end - start - return Promise.resolve(stats) - }) + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + 
Object.keys(s).forEach((k) => { + stats[k] = s[k] }) - }, Promise.resolve({})) - .then((stats) => { - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats - }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats } -function markStartTime (cache, opts) { - return Promise.resolve({ startTime: new Date() }) +async function markStartTime (cache, opts) { + return { startTime: new Date() } } -function markEndTime (cache, opts) { - return Promise.resolve({ endTime: new Date() }) +async function markEndTime (cache, opts) { + return { endTime: new Date() } } -function fixPerms (cache, opts) { +async function fixPerms (cache, opts) { opts.log.silly('verify', 'fixing cache permissions') - return fixOwner - .mkdirfix(cache, cache) - .then(() => { - // TODO - fix file permissions too - return fixOwner.chownr(cache, cache) - }) - .then(() => null) + await fixOwner.mkdirfix(cache, cache) + // TODO - fix file permissions too + await fixOwner.chownr(cache, cache) + return null } // Implements a naive mark-and-sweep tracing garbage collector. @@ -106,7 +92,7 @@ function fixPerms (cache, opts) { // 4. If content is live, verify its checksum and delete it if it fails // 5. If content is not marked as live, rimraf it. // -function garbageCollect (cache, opts) { +async function garbageCollect (cache, opts) { opts.log.silly('verify', 'garbage collecting content') const indexStream = index.lsStream(cache) const liveContent = new Set() @@ -117,156 +103,135 @@ function garbageCollect (cache, opts) { liveContent.add(entry.integrity.toString()) }) - return new Promise((resolve, reject) => { + await new Promise((resolve, reject) => { indexStream.on('end', resolve).on('error', reject) - }).then(() => { - const contentDir = contentPath.contentDir(cache) - return glob(globify(path.join(contentDir, '**')), { - follow: false, - nodir: true, - nosort: true, - }).then((files) => { - return Promise.resolve({ - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - }).then((stats) => - pMap( - files, - (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - return verifyContent(f, integrity).then((info) => { - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - return stats - }) - } else { - // No entries refer to this content. We can delete. 
- stats.reclaimedCount++ - return stat(f).then((s) => { - return rimraf(f).then(() => { - stats.reclaimedSize += s.size - return stats - }) - }) - } - }, - { concurrency: opts.concurrency } - ).then(() => stats) - ) - }) }) -} - -function verifyContent (filepath, sri) { - return stat(filepath) - .then((s) => { - const contentInfo = { - size: s.size, - valid: true, - } - return ssri - .checkStream(new fsm.ReadStream(filepath), sri) - .catch((err) => { - if (err.code !== 'EINTEGRITY') { - throw err - } - - return rimraf(filepath).then(() => { - contentInfo.valid = false - }) - }) - .then(() => contentInfo) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } + const contentDir = contentPath.contentDir(cache) + const files = await glob(globify(path.join(contentDir, '**')), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await fs.stat(f) + await rimraf(f) + stats.reclaimedSize += s.size } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await fs.stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { throw err - }) + } + + await rimraf(filepath) + contentInfo.valid = false + } + return contentInfo } -function rebuildIndex (cache, opts) { +async function rebuildIndex (cache, opts) { opts.log.silly('verify', 'rebuilding index') - return index.ls(cache).then((entries) => { - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip 
+ } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) } } - return pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ).then(() => stats) - }) + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats } -function rebuildBucket (cache, bucket, stats, opts) { - return truncate(bucket._path).then(() => { - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. - return bucket.reduce((promise, entry) => { - return promise.then(() => { - const content = contentPath(cache, entry.integrity) - return stat(content) - .then(() => { - return index - .insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - }) - .then(() => { - stats.totalEntries++ - }) - }) - .catch((err) => { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - return - } - throw err - }) +async function rebuildBucket (cache, bucket, stats, opts) { + await fs.truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. + for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await fs.stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, }) - }, Promise.resolve()) - }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } } function cleanTmp (cache, opts) { @@ -278,7 +243,7 @@ function writeVerifile (cache, opts) { const verifile = path.join(cache, '_lastverified') opts.log.silly('verify', 'writing verifile to ' + verifile) try { - return writeFile(verifile, '' + +new Date()) + return fs.writeFile(verifile, `${Date.now()}`) } finally { fixOwner.chownr.sync(cache, verifile) } @@ -286,8 +251,7 @@ function writeVerifile (cache, opts) { module.exports.lastRun = lastRun -function lastRun (cache) { - return readFile(path.join(cache, '_lastverified'), 'utf8').then( - (data) => new Date(+data) - ) +async function lastRun (cache) { + const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) } diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index cd7e4de5e0cba..8e54901b4562a 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "16.0.7", + "version": "16.1.0", "cache-version": { "content": "2", "index": "5" @@ -69,20 +69,16 @@ }, "devDependencies": { "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.4.1", + "@npmcli/template-oss": "3.4.3", "tap": "^16.0.0" }, - "tap": { - "100": true, - "test-regex": "test/[^/]*.js" - }, "engines": { "node": "^12.13.0 || ^14.15.0 || >=16.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "3.4.1" + "version": "3.4.3" }, "author": "GitHub Inc." 
 }
diff --git a/package-lock.json b/package-lock.json
index 3ef6ead05e562..83d078741d946 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -96,7 +96,7 @@
         "@npmcli/run-script": "^3.0.1",
         "abbrev": "~1.1.1",
         "archy": "~1.0.0",
-        "cacache": "^16.0.7",
+        "cacache": "^16.1.0",
         "chalk": "^4.1.2",
         "chownr": "^2.0.0",
         "cli-columns": "^4.0.0",
@@ -1577,9 +1577,9 @@
       }
     },
     "node_modules/cacache": {
-      "version": "16.0.7",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.7.tgz",
-      "integrity": "sha512-a4zfQpp5vm4Ipdvbj+ZrPonikRhm6WBEd4zT1Yc1DXsmAxrPgDwWBLF/u/wTVXSFPIgOJ1U3ghSa2Xm4s3h28w==",
+      "version": "16.1.0",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.0.tgz",
+      "integrity": "sha512-Pk4aQkwCW82A4jGKFvcGkQFqZcMspfP9YWq9Pr87/ldDvlWf718zeI6KWCdKt/jeihu6BytHRUicJPB1K2k8EQ==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/fs": "^2.1.0",
@@ -11224,9 +11224,9 @@
       }
     },
     "cacache": {
-      "version": "16.0.7",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.0.7.tgz",
-      "integrity": "sha512-a4zfQpp5vm4Ipdvbj+ZrPonikRhm6WBEd4zT1Yc1DXsmAxrPgDwWBLF/u/wTVXSFPIgOJ1U3ghSa2Xm4s3h28w==",
+      "version": "16.1.0",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.0.tgz",
+      "integrity": "sha512-Pk4aQkwCW82A4jGKFvcGkQFqZcMspfP9YWq9Pr87/ldDvlWf718zeI6KWCdKt/jeihu6BytHRUicJPB1K2k8EQ==",
       "requires": {
         "@npmcli/fs": "^2.1.0",
         "@npmcli/move-file": "^2.0.0",
diff --git a/package.json b/package.json
index e654f94102d9d..621be8335705a 100644
--- a/package.json
+++ b/package.json
@@ -65,7 +65,7 @@
     "@npmcli/run-script": "^3.0.1",
     "abbrev": "~1.1.1",
     "archy": "~1.0.0",
-    "cacache": "^16.0.7",
+    "cacache": "^16.1.0",
     "chalk": "^4.1.2",
     "chownr": "^2.0.0",
     "cli-columns": "^4.0.0",