diff --git a/node_modules/cacache/README.md b/node_modules/cacache/README.md
index 0c315595abd34..6dc11babfa62a 100644
--- a/node_modules/cacache/README.md
+++ b/node_modules/cacache/README.md
@@ -458,13 +458,17 @@ cacache.rm.all(cachePath).then(() => {
})
```
-#### `> cacache.rm.entry(cache, key) -> Promise`
+#### `> cacache.rm.entry(cache, key, [opts]) -> Promise`
Alias: `cacache.rm`
Removes the index entry for `key`. Content will still be accessible if
requested directly by content address ([`get.stream.byDigest`](#get-stream)).
+By default, this appends a new entry to the index with an integrity of
+`null`, which acts as a deletion and shadows any previous entries for the
+key. If `opts.removeFully` is set to `true`, the index file itself will be
+physically deleted from disk instead.
+
To remove the content itself (which might still be used by other entries), use
[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
[`verify`](#verify).
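+
+A minimal sketch of both modes (assuming `cachePath` is an existing cache):
+
+```javascript
+// default: append a null-integrity tombstone; content stays on disk
+cacache.rm.entry(cachePath, 'my-key').then(() => {
+  console.log('entry removed from index')
+})
+
+// removeFully: delete the index bucket file itself
+cacache.rm.entry(cachePath, 'my-key', { removeFully: true }).then(() => {
+  console.log('index bucket deleted from disk')
+})
+```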
@@ -491,12 +495,21 @@ cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
})
```
-#### `> cacache.index.compact(cache, key, matchFn) -> Promise`
+#### `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise`
Uses `matchFn`, which must be a synchronous function that accepts two entries
and returns a boolean indicating whether or not the two entries match, to
deduplicate all entries in the cache for the given `key`.
+If `opts.validateEntry` is provided, it will be called with a single index
+entry as its only parameter. It must return a Boolean: if it returns `true`,
+the entry is considered valid and will be kept in the index; if it returns
+`false`, the entry will be removed from the index.
+
+If `opts.validateEntry` is not provided, every entry in the index will be
+deduplicated and kept until the first `null` integrity is reached; the `null`
+is treated as a deletion, so all entries written before it are removed.
+
The deduplicated list of entries is both written to the index, replacing the
existing content, and returned in the Promise.
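+
+A minimal sketch (the `matchFn` and metadata shape here are illustrative):
+
+```javascript
+cacache.index.compact(
+  cachePath,
+  'my-key',
+  (a, b) => a.metadata.revision === b.metadata.revision,
+  { validateEntry: (entry) => entry.integrity !== null }
+).then((entries) => {
+  console.log(`kept ${entries.length} deduplicated entries`)
+})
+```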
diff --git a/node_modules/cacache/get.js b/node_modules/cacache/get.js
index b6bae1e504eba..fe710bbd68def 100644
--- a/node_modules/cacache/get.js
+++ b/node_modules/cacache/get.js
@@ -32,18 +32,18 @@ function getData (byDigest, cache, key, opts = {}) {
metadata: memoized.entry.metadata,
data: memoized.data,
integrity: memoized.entry.integrity,
- size: memoized.entry.size
+ size: memoized.entry.size,
}
)
}
return (byDigest ? Promise.resolve(null) : index.find(cache, key, opts)).then(
(entry) => {
- if (!entry && !byDigest) {
+ if (!entry && !byDigest)
throw new index.NotFoundError(cache, key)
- }
+
return read(cache, byDigest ? key : entry.integrity, {
integrity,
- size
+ size,
})
.then((data) =>
byDigest
@@ -52,15 +52,15 @@ function getData (byDigest, cache, key, opts = {}) {
data,
metadata: entry.metadata,
size: entry.size,
- integrity: entry.integrity
+ integrity: entry.integrity,
}
)
.then((res) => {
- if (memoize && byDigest) {
+ if (memoize && byDigest)
memo.put.byDigest(cache, key, res, opts)
- } else if (memoize) {
+ else if (memoize)
memo.put(cache, entry, res.data, opts)
- }
+
return res
})
}
@@ -86,16 +86,16 @@ function getDataSync (byDigest, cache, key, opts = {}) {
metadata: memoized.entry.metadata,
data: memoized.data,
integrity: memoized.entry.integrity,
- size: memoized.entry.size
+ size: memoized.entry.size,
}
}
const entry = !byDigest && index.find.sync(cache, key, opts)
- if (!entry && !byDigest) {
+ if (!entry && !byDigest)
throw new index.NotFoundError(cache, key)
- }
+
const data = read.sync(cache, byDigest ? key : entry.integrity, {
integrity: integrity,
- size: size
+ size: size,
})
const res = byDigest
? data
@@ -103,13 +103,13 @@ function getDataSync (byDigest, cache, key, opts = {}) {
metadata: entry.metadata,
data: data,
size: entry.size,
- integrity: entry.integrity
+ integrity: entry.integrity,
}
- if (memoize && byDigest) {
+ if (memoize && byDigest)
memo.put.byDigest(cache, key, res, opts)
- } else if (memoize) {
+ else if (memoize)
memo.put(cache, entry, res.data, opts)
- }
+
return res
}
@@ -129,17 +129,16 @@ const getMemoizedStream = (memoized) => {
function getStream (cache, key, opts = {}) {
const { memoize, size } = opts
const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
+ if (memoized && memoize !== false)
return getMemoizedStream(memoized)
- }
const stream = new Pipeline()
index
.find(cache, key)
.then((entry) => {
- if (!entry) {
+ if (!entry)
throw new index.NotFoundError(cache, key)
- }
+
stream.emit('metadata', entry.metadata)
stream.emit('integrity', entry.integrity)
stream.emit('size', entry.size)
@@ -178,9 +177,9 @@ function getStreamDigest (cache, integrity, opts = {}) {
return stream
} else {
const stream = read.readStream(cache, integrity, opts)
- if (!memoize) {
+ if (!memoize)
return stream
- }
+
const memoStream = new Collect.PassThrough()
memoStream.on('collect', data => memo.put.byDigest(
cache,
@@ -197,11 +196,10 @@ module.exports.info = info
function info (cache, key, opts = {}) {
const { memoize } = opts
const memoized = memo.get(cache, key, opts)
- if (memoized && memoize !== false) {
+ if (memoized && memoize !== false)
return Promise.resolve(memoized.entry)
- } else {
+ else
return index.find(cache, key)
- }
}
module.exports.hasContent = read.hasContent
@@ -224,9 +222,9 @@ function copy (byDigest, cache, key, dest, opts = {}) {
? Promise.resolve(null)
: index.find(cache, key, opts)
).then((entry) => {
- if (!entry && !byDigest) {
+ if (!entry && !byDigest)
throw new index.NotFoundError(cache, key)
- }
+
return read
.copy(cache, byDigest ? key : entry.integrity, dest, opts)
.then(() => {
@@ -235,7 +233,7 @@ function copy (byDigest, cache, key, dest, opts = {}) {
: {
metadata: entry.metadata,
size: entry.size,
- integrity: entry.integrity
+ integrity: entry.integrity,
}
})
})
@@ -248,7 +246,7 @@ function copy (byDigest, cache, key, dest, opts = {}) {
: {
metadata: res.metadata,
size: res.size,
- integrity: res.integrity
+ integrity: res.integrity,
}
})
})
diff --git a/node_modules/cacache/lib/content/read.js b/node_modules/cacache/lib/content/read.js
index 7cc16482d44c8..034e8eee05b10 100644
--- a/node_modules/cacache/lib/content/read.js
+++ b/node_modules/cacache/lib/content/read.js
@@ -20,17 +20,16 @@ function read (cache, integrity, opts = {}) {
// get size
return lstat(cpath).then(stat => ({ stat, cpath, sri }))
}).then(({ stat, cpath, sri }) => {
- if (typeof size === 'number' && stat.size !== size) {
+ if (typeof size === 'number' && stat.size !== size)
throw sizeError(size, stat.size)
- }
- if (stat.size > MAX_SINGLE_READ_SIZE) {
+
+ if (stat.size > MAX_SINGLE_READ_SIZE)
return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
- }
return readFile(cpath, null).then((data) => {
- if (!ssri.checkData(data, sri)) {
+ if (!ssri.checkData(data, sri))
throw integrityError(sri, cpath)
- }
+
return data
})
})
@@ -40,11 +39,11 @@ const readPipeline = (cpath, size, sri, stream) => {
stream.push(
new fsm.ReadStream(cpath, {
size,
- readSize: MAX_SINGLE_READ_SIZE
+ readSize: MAX_SINGLE_READ_SIZE,
}),
ssri.integrityStream({
integrity: sri,
- size
+ size,
})
)
return stream
@@ -56,13 +55,11 @@ function readSync (cache, integrity, opts = {}) {
const { size } = opts
return withContentSriSync(cache, integrity, (cpath, sri) => {
const data = fs.readFileSync(cpath)
- if (typeof size === 'number' && size !== data.length) {
+ if (typeof size === 'number' && size !== data.length)
throw sizeError(size, data.length)
- }
- if (ssri.checkData(data, sri)) {
+ if (ssri.checkData(data, sri))
return data
- }
throw integrityError(sri, cpath)
})
@@ -78,9 +75,9 @@ function readStream (cache, integrity, opts = {}) {
// just lstat to ensure it exists
return lstat(cpath).then((stat) => ({ stat, cpath, sri }))
}).then(({ stat, cpath, sri }) => {
- if (typeof size === 'number' && size !== stat.size) {
+ if (typeof size === 'number' && size !== stat.size)
return stream.emit('error', sizeError(size, stat.size))
- }
+
readPipeline(cpath, stat.size, sri, stream)
}, er => stream.emit('error', er))
@@ -109,22 +106,21 @@ function copySync (cache, integrity, dest) {
module.exports.hasContent = hasContent
function hasContent (cache, integrity) {
- if (!integrity) {
+ if (!integrity)
return Promise.resolve(false)
- }
+
return withContentSri(cache, integrity, (cpath, sri) => {
return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat }))
}).catch((err) => {
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return false
- }
+
if (err.code === 'EPERM') {
/* istanbul ignore else */
- if (process.platform !== 'win32') {
+ if (process.platform !== 'win32')
throw err
- } else {
+ else
return false
- }
}
})
}
@@ -132,24 +128,23 @@ function hasContent (cache, integrity) {
module.exports.hasContent.sync = hasContentSync
function hasContentSync (cache, integrity) {
- if (!integrity) {
+ if (!integrity)
return false
- }
+
return withContentSriSync(cache, integrity, (cpath, sri) => {
try {
const stat = fs.lstatSync(cpath)
return { size: stat.size, sri, stat }
} catch (err) {
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return false
- }
+
if (err.code === 'EPERM') {
/* istanbul ignore else */
- if (process.platform !== 'win32') {
+ if (process.platform !== 'win32')
throw err
- } else {
+ else
return false
- }
}
}
})
@@ -167,7 +162,8 @@ function withContentSri (cache, integrity, fn) {
const cpath = contentPath(cache, digests[0])
return fn(cpath, digests[0])
} else {
- // Can't use race here because a generic error can happen before a ENOENT error, and can happen before a valid result
+    // Can't use race here because a generic error can happen before
+    // an ENOENT error, and can happen before a valid result
return Promise
.all(digests.map((meta) => {
return withContentSri(cache, meta, fn)
@@ -184,15 +180,13 @@ function withContentSri (cache, integrity, fn) {
.then((results) => {
// Return the first non error if it is found
const result = results.find((r) => !(r instanceof Error))
- if (result) {
+ if (result)
return result
- }
// Throw the No matching content found error
const enoentError = results.find((r) => r.code === 'ENOENT')
- if (enoentError) {
+ if (enoentError)
throw enoentError
- }
// Throw generic error
throw results.find((r) => r instanceof Error)
diff --git a/node_modules/cacache/lib/content/rm.js b/node_modules/cacache/lib/content/rm.js
index 50612364e9b48..6a3d1a3d02340 100644
--- a/node_modules/cacache/lib/content/rm.js
+++ b/node_modules/cacache/lib/content/rm.js
@@ -11,10 +11,9 @@ module.exports = rm
function rm (cache, integrity) {
return hasContent(cache, integrity).then((content) => {
// ~pretty~ sure we can't end up with a content lacking sri, but be safe
- if (content && content.sri) {
+ if (content && content.sri)
return rimraf(contentPath(cache, content.sri)).then(() => true)
- } else {
+ else
return false
- }
})
}
diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js
index e8f3e3534940c..dde1bd1dd5dae 100644
--- a/node_modules/cacache/lib/content/write.js
+++ b/node_modules/cacache/lib/content/write.js
@@ -22,16 +22,15 @@ module.exports = write
function write (cache, data, opts = {}) {
const { algorithms, size, integrity } = opts
- if (algorithms && algorithms.length > 1) {
+ if (algorithms && algorithms.length > 1)
throw new Error('opts.algorithms only supports a single algorithm for now')
- }
- if (typeof size === 'number' && data.length !== size) {
+
+ if (typeof size === 'number' && data.length !== size)
return Promise.reject(sizeError(size, data.length))
- }
+
const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
- if (integrity && !ssri.checkData(data, integrity, opts)) {
+ if (integrity && !ssri.checkData(data, integrity, opts))
return Promise.reject(checksumError(integrity, sri))
- }
return disposer(makeTmp(cache, opts), makeTmpDisposer,
(tmp) => {
@@ -112,13 +111,17 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts) {
const hashStream = ssri.integrityStream({
integrity: opts.integrity,
algorithms: opts.algorithms,
- size: opts.size
+ size: opts.size,
+ })
+ hashStream.on('integrity', i => {
+ integrity = i
+ })
+ hashStream.on('size', s => {
+ size = s
})
- hashStream.on('integrity', i => { integrity = i })
- hashStream.on('size', s => { size = s })
const outStream = new fsm.WriteStream(tmpTarget, {
- flags: 'wx'
+ flags: 'wx',
})
// NB: this can throw if the hashStream has a problem with
@@ -132,21 +135,23 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts) {
return pipeline.promise()
.then(() => ({ integrity, size }))
- .catch(er => rimraf(tmpTarget).then(() => { throw er }))
+ .catch(er => rimraf(tmpTarget).then(() => {
+ throw er
+ }))
}
function makeTmp (cache, opts) {
const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({
target: tmpTarget,
- moved: false
+ moved: false,
}))
}
function makeTmpDisposer (tmp) {
- if (tmp.moved) {
+ if (tmp.moved)
return Promise.resolve()
- }
+
return rimraf(tmp.target)
}
diff --git a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js
index 8827ebb541b2d..71aac5ed75b14 100644
--- a/node_modules/cacache/lib/entry-index.js
+++ b/node_modules/cacache/lib/entry-index.js
@@ -14,7 +14,9 @@ const fixOwner = require('./util/fix-owner')
const hashToSegments = require('./util/hash-to-segments')
const indexV = require('../package.json')['cache-version'].index
const moveFile = require('@npmcli/move-file')
-const rimraf = util.promisify(require('rimraf'))
+const _rimraf = require('rimraf')
+const rimraf = util.promisify(_rimraf)
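+// util.promisify returns a bare wrapper, so re-attach rimraf's sync method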
+rimraf.sync = _rimraf.sync
const appendFile = util.promisify(fs.appendFile)
const readFile = util.promisify(fs.readFile)
@@ -35,15 +37,30 @@ module.exports.compact = compact
async function compact (cache, key, matchFn, opts = {}) {
const bucket = bucketPath(cache, key)
const entries = await bucketEntries(bucket)
- // reduceRight because the bottom-most result is the newest
+ const newEntries = []
+ // we loop backwards because the bottom-most result is the newest
// since we add new entries with appendFile
- const newEntries = entries.reduceRight((acc, newEntry) => {
- if (!acc.find((oldEntry) => matchFn(oldEntry, newEntry))) {
- acc.push(newEntry)
- }
-
- return acc
- }, [])
+ for (let i = entries.length - 1; i >= 0; --i) {
+ const entry = entries[i]
+ // a null integrity could mean either a delete was appended
+ // or the user has simply stored an index that does not map
+ // to any content. we determine if the user wants to keep the
+ // null integrity based on the validateEntry function passed in options.
+ // if the integrity is null and no validateEntry is provided, we break
+ // as we consider the null integrity to be a deletion of everything
+ // that came before it.
+ if (entry.integrity === null && !opts.validateEntry)
+ break
+
+    // if this entry is valid, and either it is the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+ if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+ (newEntries.length === 0 ||
+ !newEntries.find((oldEntry) => matchFn(oldEntry, entry))))
+ newEntries.unshift(entry)
+ }
const newIndex = '\n' + newEntries.map((entry) => {
const stringified = JSON.stringify(entry)
@@ -56,14 +73,13 @@ async function compact (cache, key, matchFn, opts = {}) {
await fixOwner.mkdirfix(cache, path.dirname(target))
return {
target,
- moved: false
+ moved: false,
}
}
const teardown = async (tmp) => {
- if (!tmp.moved) {
+ if (!tmp.moved)
return rimraf(tmp.target)
- }
}
const write = async (tmp) => {
@@ -76,16 +92,20 @@ async function compact (cache, key, matchFn, opts = {}) {
try {
await fixOwner.chownr(cache, bucket)
} catch (err) {
- if (err.code !== 'ENOENT') {
+ if (err.code !== 'ENOENT')
throw err
- }
}
}
// write the file atomically
await disposer(setup(), teardown, write)
- return newEntries.map((entry) => formatEntry(cache, entry, true))
+  // we reverse the list we generated so that the newest entries
+  // come first, making them easier to loop through.
+  // the true passed to formatEntry tells it to keep null
+  // integrity values; if they made it this far, it's because
+  // validateEntry returned true, and as such we should return them
+ return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
}
module.exports.insert = insert
@@ -98,7 +118,7 @@ function insert (cache, key, integrity, opts = {}) {
integrity: integrity && ssri.stringify(integrity),
time: Date.now(),
size,
- metadata
+ metadata,
}
return fixOwner
.mkdirfix(cache, path.dirname(bucket))
@@ -110,14 +130,15 @@ function insert (cache, key, integrity, opts = {}) {
// another while still preserving the string length of the JSON in
// question. So, we just slap the length in there and verify it on read.
//
- // Thanks to @isaacs for the whiteboarding session that ended up with this.
+ // Thanks to @isaacs for the whiteboarding session that ended up with
+ // this.
return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
})
.then(() => fixOwner.chownr(cache, bucket))
.catch((err) => {
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return undefined
- }
+
throw err
// There's a class of race conditions that happen when things get deleted
// during fixOwner, or between the two mkdirfix/chownr calls.
@@ -140,7 +161,7 @@ function insertSync (cache, key, integrity, opts = {}) {
integrity: integrity && ssri.stringify(integrity),
time: Date.now(),
size,
- metadata
+ metadata,
}
fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
const stringified = JSON.stringify(entry)
@@ -148,9 +169,8 @@ function insertSync (cache, key, integrity, opts = {}) {
try {
fixOwner.chownr.sync(cache, bucket)
} catch (err) {
- if (err.code !== 'ENOENT') {
+ if (err.code !== 'ENOENT')
throw err
- }
}
return formatEntry(cache, entry)
}
@@ -162,19 +182,17 @@ function find (cache, key) {
return bucketEntries(bucket)
.then((entries) => {
return entries.reduce((latest, next) => {
- if (next && next.key === key) {
+ if (next && next.key === key)
return formatEntry(cache, next)
- } else {
+ else
return latest
- }
}, null)
})
.catch((err) => {
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return null
- } else {
+ else
throw err
- }
})
}
@@ -184,31 +202,37 @@ function findSync (cache, key) {
const bucket = bucketPath(cache, key)
try {
return bucketEntriesSync(bucket).reduce((latest, next) => {
- if (next && next.key === key) {
+ if (next && next.key === key)
return formatEntry(cache, next)
- } else {
+ else
return latest
- }
}, null)
} catch (err) {
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return null
- } else {
+ else
throw err
- }
}
}
module.exports.delete = del
-function del (cache, key, opts) {
- return insert(cache, key, null, opts)
+function del (cache, key, opts = {}) {
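+  // by default, append a tombstone entry with a null integrity;
+  // with removeFully, delete the index bucket file from disk instead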
+ if (!opts.removeFully)
+ return insert(cache, key, null, opts)
+
+ const bucket = bucketPath(cache, key)
+ return rimraf(bucket)
}
module.exports.delete.sync = delSync
-function delSync (cache, key, opts) {
- return insertSync(cache, key, null, opts)
+function delSync (cache, key, opts = {}) {
+ if (!opts.removeFully)
+ return insertSync(cache, key, null, opts)
+
+ const bucket = bucketPath(cache, key)
+ return rimraf.sync(bucket)
}
module.exports.lsStream = lsStream
@@ -239,12 +263,12 @@ function lsStream (cache) {
// reduced is a map of key => entry
for (const entry of reduced.values()) {
const formatted = formatEntry(cache, entry)
- if (formatted) {
+ if (formatted)
stream.write(formatted)
- }
}
}).catch(err => {
- if (err.code === 'ENOENT') { return undefined }
+ if (err.code === 'ENOENT')
+ return undefined
throw err
})
})
@@ -288,9 +312,9 @@ function bucketEntriesSync (bucket, filter) {
function _bucketEntries (data, filter) {
const entries = []
data.split('\n').forEach((entry) => {
- if (!entry) {
+ if (!entry)
return
- }
+
const pieces = entry.split('\t')
if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
// Hash is no good! Corruption or malice? Doesn't matter!
@@ -304,9 +328,8 @@ function _bucketEntries (data, filter) {
// Entry is corrupted!
return
}
- if (obj) {
+ if (obj)
entries.push(obj)
- }
})
return entries
}
@@ -348,24 +371,23 @@ function hash (str, digest) {
function formatEntry (cache, entry, keepAll) {
// Treat null digests as deletions. They'll shadow any previous entries.
- if (!entry.integrity && !keepAll) {
+ if (!entry.integrity && !keepAll)
return null
- }
+
return {
key: entry.key,
integrity: entry.integrity,
path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
size: entry.size,
time: entry.time,
- metadata: entry.metadata
+ metadata: entry.metadata,
}
}
function readdirOrEmpty (dir) {
return readdir(dir).catch((err) => {
- if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+ if (err.code === 'ENOENT' || err.code === 'ENOTDIR')
return []
- }
throw err
})
diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js
index 185141d8eadad..d5465f39fc581 100644
--- a/node_modules/cacache/lib/memoization.js
+++ b/node_modules/cacache/lib/memoization.js
@@ -8,7 +8,7 @@ const MAX_AGE = 3 * 60 * 1000
const MEMOIZED = new LRU({
max: MAX_SIZE,
maxAge: MAX_AGE,
- length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length
+ length: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
})
module.exports.clearMemoized = clearMemoized
@@ -62,13 +62,12 @@ class ObjProxy {
}
function pickMem (opts) {
- if (!opts || !opts.memoize) {
+ if (!opts || !opts.memoize)
return MEMOIZED
- } else if (opts.memoize.get && opts.memoize.set) {
+ else if (opts.memoize.get && opts.memoize.set)
return opts.memoize
- } else if (typeof opts.memoize === 'object') {
+ else if (typeof opts.memoize === 'object')
return new ObjProxy(opts.memoize)
- } else {
+ else
return MEMOIZED
- }
}
diff --git a/node_modules/cacache/lib/util/disposer.js b/node_modules/cacache/lib/util/disposer.js
index 8a24ad2f2a2a2..aa8aed54da551 100644
--- a/node_modules/cacache/lib/util/disposer.js
+++ b/node_modules/cacache/lib/util/disposer.js
@@ -8,9 +8,9 @@ function disposer (creatorFn, disposerFn, fn) {
.then(
// disposer resolved, do something with original fn's promise
() => {
- if (shouldThrow) {
+ if (shouldThrow)
throw result
- }
+
return result
},
// Disposer fn failed, crash process
diff --git a/node_modules/cacache/lib/util/fix-owner.js b/node_modules/cacache/lib/util/fix-owner.js
index 9afa638a8c839..90ffece524f54 100644
--- a/node_modules/cacache/lib/util/fix-owner.js
+++ b/node_modules/cacache/lib/util/fix-owner.js
@@ -49,9 +49,8 @@ function fixOwner (cache, filepath) {
const { uid, gid } = owner
// No need to override if it's already what we used.
- if (self.uid === uid && self.gid === gid) {
+ if (self.uid === uid && self.gid === gid)
return
- }
return inflight('fixOwner: fixing ownership on ' + filepath, () =>
chownr(
@@ -59,9 +58,9 @@ function fixOwner (cache, filepath) {
typeof uid === 'number' ? uid : self.uid,
typeof gid === 'number' ? gid : self.gid
).catch((err) => {
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return null
- }
+
throw err
})
)
@@ -94,9 +93,9 @@ function fixOwnerSync (cache, filepath) {
)
} catch (err) {
// only catch ENOENT, any other error is a problem.
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return null
- }
+
throw err
}
}
@@ -111,14 +110,13 @@ function mkdirfix (cache, p, cb) {
return Promise.resolve(inferOwner(cache)).then(() => {
return mkdirp(p)
.then((made) => {
- if (made) {
+ if (made)
return fixOwner(cache, made).then(() => made)
- }
})
.catch((err) => {
- if (err.code === 'EEXIST') {
+ if (err.code === 'EEXIST')
return fixOwner(cache, p).then(() => null)
- }
+
throw err
})
})
@@ -138,8 +136,7 @@ function mkdirfixSync (cache, p) {
if (err.code === 'EEXIST') {
fixOwnerSync(cache, p)
return null
- } else {
+ } else
throw err
- }
}
}
diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js
index 84130b2e9ffb8..c3f9e35eb99c7 100644
--- a/node_modules/cacache/lib/util/move-file.js
+++ b/node_modules/cacache/lib/util/move-file.js
@@ -38,19 +38,17 @@ function moveFile (src, dest) {
} else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
// file already exists, so whatever
return resolve()
- } else {
+ } else
return reject(err)
- }
- } else {
+ } else
return resolve()
- }
})
})
.then(() => {
// content should never change for any reason, so make it read-only
return Promise.all([
unlink(src),
- !isWindows && chmod(dest, '0444')
+ !isWindows && chmod(dest, '0444'),
])
})
.catch(() => {
diff --git a/node_modules/cacache/lib/verify.js b/node_modules/cacache/lib/verify.js
index 5a011a3f1d2cb..e9d679eceaf51 100644
--- a/node_modules/cacache/lib/verify.js
+++ b/node_modules/cacache/lib/verify.js
@@ -24,7 +24,7 @@ const readFile = util.promisify(fs.readFile)
const verifyOpts = (opts) => ({
concurrency: 20,
log: { silly () {} },
- ...opts
+ ...opts,
})
module.exports = verify
@@ -40,7 +40,7 @@ function verify (cache, opts) {
rebuildIndex,
cleanTmp,
writeVerifile,
- markEndTime
+ markEndTime,
]
return steps
@@ -54,9 +54,9 @@ function verify (cache, opts) {
stats[k] = s[k]
})
const end = new Date()
- if (!stats.runTime) {
+ if (!stats.runTime)
stats.runTime = {}
- }
+
stats.runTime[label] = end - start
return Promise.resolve(stats)
})
@@ -108,9 +108,9 @@ function garbageCollect (cache, opts) {
const indexStream = index.lsStream(cache)
const liveContent = new Set()
indexStream.on('data', (entry) => {
- if (opts.filter && !opts.filter(entry)) {
+ if (opts.filter && !opts.filter(entry))
return
- }
+
liveContent.add(entry.integrity.toString())
})
return new Promise((resolve, reject) => {
@@ -120,14 +120,14 @@ function garbageCollect (cache, opts) {
return glob(path.join(contentDir, '**'), {
follow: false,
nodir: true,
- nosort: true
+ nosort: true,
}).then((files) => {
return Promise.resolve({
verifiedContent: 0,
reclaimedCount: 0,
reclaimedSize: 0,
badContentCount: 0,
- keptSize: 0
+ keptSize: 0,
}).then((stats) =>
pMap(
files,
@@ -171,14 +171,14 @@ function verifyContent (filepath, sri) {
.then((s) => {
const contentInfo = {
size: s.size,
- valid: true
+ valid: true,
}
return ssri
.checkStream(new fsm.ReadStream(filepath), sri)
.catch((err) => {
- if (err.code !== 'EINTEGRITY') {
+ if (err.code !== 'EINTEGRITY')
throw err
- }
+
return rimraf(filepath).then(() => {
contentInfo.valid = false
})
@@ -186,9 +186,9 @@ function verifyContent (filepath, sri) {
.then(() => contentInfo)
})
.catch((err) => {
- if (err.code === 'ENOENT') {
+ if (err.code === 'ENOENT')
return { size: 0, valid: false }
- }
+
throw err
})
}
@@ -199,7 +199,7 @@ function rebuildIndex (cache, opts) {
const stats = {
missingContent: 0,
rejectedEntries: 0,
- totalEntries: 0
+ totalEntries: 0,
}
const buckets = {}
for (const k in entries) {
@@ -209,9 +209,9 @@ function rebuildIndex (cache, opts) {
const entry = entries[k]
const excluded = opts.filter && !opts.filter(entry)
excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded) {
+ if (buckets[hashed] && !excluded)
buckets[hashed].push(entry)
- } else if (buckets[hashed] && excluded) {
+ else if (buckets[hashed] && excluded) {
// skip
} else if (excluded) {
buckets[hashed] = []
@@ -244,7 +244,7 @@ function rebuildBucket (cache, bucket, stats, opts) {
return index
.insert(cache, entry.key, entry.integrity, {
metadata: entry.metadata,
- size: entry.size
+ size: entry.size,
})
.then(() => {
stats.totalEntries++
diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json
index aefa5aae42585..3c2e65c0404a0 100644
--- a/node_modules/cacache/package.json
+++ b/node_modules/cacache/package.json
@@ -1,6 +1,6 @@
{
"name": "cacache",
- "version": "15.1.0",
+ "version": "15.2.0",
"cache-version": {
"content": "2",
"index": "5"
@@ -13,15 +13,17 @@
],
"scripts": {
"benchmarks": "node test/benchmarks",
- "lint": "standard",
- "postrelease": "npm publish",
- "posttest": "npm run lint",
- "prepublishOnly": "git push --follow-tags",
- "prerelease": "npm t",
- "release": "standard-version -s",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push origin --follow-tags",
"test": "tap",
+ "snap": "tap",
"coverage": "tap",
- "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test"
+ "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+ "lint": "npm run npmclilint -- \"*.*js\" \"lib/**/*.*js\" \"test/**/*.*js\"",
+ "npmclilint": "npmcli-lint",
+ "lintfix": "npm run lint -- --fix",
+ "postsnap": "npm run lintfix --"
},
"repository": "https://github.com/npm/cacache",
"keywords": [
@@ -39,23 +41,6 @@
"disk cache",
"disk storage"
],
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org",
- "twitter": "maybekatz"
- },
- "contributors": [
- {
- "name": "Charlotte Spencer",
- "email": "charlottelaspencer@gmail.com",
- "twitter": "charlotteis"
- },
- {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org",
- "twitter": "ReBeccaOrg"
- }
- ],
"license": "ISC",
"dependencies": {
"@npmcli/move-file": "^1.0.1",
@@ -77,11 +62,10 @@
"unique-filename": "^1.1.1"
},
"devDependencies": {
+ "@npmcli/lint": "^1.0.1",
"benchmark": "^2.1.4",
"chalk": "^4.0.0",
"require-inject": "^1.4.4",
- "standard": "^14.3.1",
- "standard-version": "^7.1.0",
"tacks": "^1.3.0",
"tap": "^15.0.9"
},
diff --git a/node_modules/cacache/put.js b/node_modules/cacache/put.js
index eb21aa867173f..84e9562bc33ab 100644
--- a/node_modules/cacache/put.js
+++ b/node_modules/cacache/put.js
@@ -9,7 +9,7 @@ const Pipeline = require('minipass-pipeline')
const putOpts = (opts) => ({
algorithms: ['sha512'],
- ...opts
+ ...opts,
})
module.exports = putData
@@ -21,9 +21,9 @@ function putData (cache, key, data, opts = {}) {
return index
.insert(cache, key, res.integrity, { ...opts, size: res.size })
.then((entry) => {
- if (memoize) {
+ if (memoize)
memo.put(cache, entry, data, opts)
- }
+
return res.integrity
})
})
@@ -67,17 +67,16 @@ function putStream (cache, key, opts = {}) {
return index
.insert(cache, key, integrity, { ...opts, size })
.then((entry) => {
- if (memoize && memoData) {
+ if (memoize && memoData)
memo.put(cache, entry, memoData, opts)
- }
- if (integrity) {
+
+ if (integrity)
pipeline.emit('integrity', integrity)
- }
- if (size) {
+
+ if (size)
pipeline.emit('size', size)
- }
})
- }
+ },
}))
return pipeline
diff --git a/node_modules/cacache/rm.js b/node_modules/cacache/rm.js
index 7dd4e8c8b07f1..f2ef6b190f457 100644
--- a/node_modules/cacache/rm.js
+++ b/node_modules/cacache/rm.js
@@ -11,9 +11,9 @@ const rmContent = require('./lib/content/rm')
module.exports = entry
module.exports.entry = entry
-function entry (cache, key) {
+function entry (cache, key, opts) {
memo.clearMemoized()
- return index.delete(cache, key)
+ return index.delete(cache, key, opts)
}
module.exports.content = content
diff --git a/package-lock.json b/package-lock.json
index 166c77a2a4199..995968e6fd6b9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -87,7 +87,7 @@
"ansistyles": "~0.1.3",
"archy": "~1.0.0",
"byte-size": "^7.0.1",
- "cacache": "^15.1.0",
+ "cacache": "^15.2.0",
"chalk": "^4.1.0",
"chownr": "^2.0.0",
"cli-columns": "^3.1.2",
@@ -1482,9 +1482,9 @@
}
},
"node_modules/cacache": {
- "version": "15.1.0",
- "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.1.0.tgz",
- "integrity": "sha512-mfx0C+mCfWjD1PnwQ9yaOrwG1ou9FkKnx0SvzUHWdFt7r7GaRtzT+9M8HAvLu62zIHtnpQ/1m93nWNDCckJGXQ==",
+ "version": "15.2.0",
+ "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz",
+ "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==",
"inBundle": true,
"dependencies": {
"@npmcli/move-file": "^1.0.1",
@@ -11391,9 +11391,9 @@
"integrity": "sha512-crQdqyCwhokxwV1UyDzLZanhkugAgft7vt0qbbdt60C6Zf3CAiGmtUCylbtYwrU6loOUw3euGrNtW1J651ot1A=="
},
"cacache": {
- "version": "15.1.0",
- "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.1.0.tgz",
- "integrity": "sha512-mfx0C+mCfWjD1PnwQ9yaOrwG1ou9FkKnx0SvzUHWdFt7r7GaRtzT+9M8HAvLu62zIHtnpQ/1m93nWNDCckJGXQ==",
+ "version": "15.2.0",
+ "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.2.0.tgz",
+ "integrity": "sha512-uKoJSHmnrqXgthDFx/IU6ED/5xd+NNGe+Bb+kLZy7Ku4P+BaiWEUflAKPZ7eAzsYGcsAGASJZsybXp+quEcHTw==",
"requires": {
"@npmcli/move-file": "^1.0.1",
"chownr": "^2.0.0",
diff --git a/package.json b/package.json
index 00209b624538c..e02f77c9c1707 100644
--- a/package.json
+++ b/package.json
@@ -51,7 +51,7 @@
"ansistyles": "~0.1.3",
"archy": "~1.0.0",
"byte-size": "^7.0.1",
- "cacache": "^15.1.0",
+ "cacache": "^15.2.0",
"chalk": "^4.1.0",
"chownr": "^2.0.0",
"cli-columns": "^3.1.2",