Commit

fix(standard): standard --fix
zkat committed Jun 17, 2019
1 parent 03d7dfe commit 7799149
Showing 17 changed files with 64 additions and 64 deletions.
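All 64 changed lines below are the same mechanical rewrite: standard --fix adds a space just inside the braces of single-line object literals and destructuring patterns (ESLint's object-curly-spacing rule set to "always", which standard enforces). A minimal before/after sketch of the rewrite, using a call that appears in this diff:

// Before: no padding inside the braces, which standard flags
writeFileAsync(tmp.target, data, {flag: 'wx'})

// After standard --fix: padding added; runtime behavior is unchanged
writeFileAsync(tmp.target, data, { flag: 'wx' })

Running standard --fix (for example via npx standard --fix) from the package root applies this whitespace-only rewrite across the repository.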
2 changes: 1 addition & 1 deletion lib/content/path.js
@@ -12,7 +12,7 @@ const ssri = require('ssri')
//
module.exports = contentPath
function contentPath (cache, integrity) {
-const sri = ssri.parse(integrity, {single: true})
+const sri = ssri.parse(integrity, { single: true })
// contentPath is the *strongest* algo given
return path.join.apply(path, [
contentDir(cache),
12 changes: 6 additions & 6 deletions lib/content/read.js
@@ -54,8 +54,8 @@ function readStream (cache, integrity, opts) {
opts = ReadOpts(opts)
const stream = new PassThrough()
withContentSri(cache, integrity, (cpath, sri) => {
-return lstatAsync(cpath).then(stat => ({cpath, sri, stat}))
-}).then(({cpath, sri, stat}) => {
+return lstatAsync(cpath).then(stat => ({ cpath, sri, stat }))
+}).then(({ cpath, sri, stat }) => {
return pipe(
fs.createReadStream(cpath),
ssri.integrityStream({
@@ -95,7 +95,7 @@ module.exports.hasContent = hasContent
function hasContent (cache, integrity) {
if (!integrity) { return BB.resolve(false) }
return withContentSri(cache, integrity, (cpath, sri) => {
-return lstatAsync(cpath).then(stat => ({size: stat.size, sri, stat}))
+return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat }))
}).catch(err => {
if (err.code === 'ENOENT') { return false }
if (err.code === 'EPERM') {
@@ -114,7 +114,7 @@ function hasContentSync (cache, integrity) {
return withContentSriSync(cache, integrity, (cpath, sri) => {
try {
const stat = fs.lstatSync(cpath)
-return {size: stat.size, sri, stat}
+return { size: stat.size, sri, stat }
} catch (err) {
if (err.code === 'ENOENT') { return false }
if (err.code === 'EPERM') {
@@ -141,12 +141,12 @@ function withContentSri (cache, integrity, fn) {
} else {
return BB.any(sri[sri.pickAlgorithm()].map(meta => {
return withContentSri(cache, meta, fn)
-}, {concurrency: 1}))
+}, { concurrency: 1 }))
.catch(err => {
if ([].some.call(err, e => e.code === 'ENOENT')) {
throw Object.assign(
new Error('No matching content found for ' + sri.toString()),
-{code: 'ENOENT'}
+{ code: 'ENOENT' }
)
} else {
throw err[0]
6 changes: 3 additions & 3 deletions lib/content/write.js
@@ -36,11 +36,11 @@ function write (cache, data, opts) {
}
return BB.using(makeTmp(cache, opts), tmp => (
writeFileAsync(
-tmp.target, data, {flag: 'wx'}
+tmp.target, data, { flag: 'wx' }
).then(() => (
moveToDestination(tmp, cache, sri, opts)
))
-)).then(() => ({integrity: sri, size: data.length}))
+)).then(() => ({ integrity: sri, size: data.length }))
}

module.exports.stream = writeStream
@@ -111,7 +111,7 @@ function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
})
errCheck()
return pipe(inputStream, hashStream, outStream).then(() => {
-return {integrity, size}
+return { integrity, size }
}).catch(err => {
return rimraf(tmpTarget).then(() => { throw err })
})
8 changes: 4 additions & 4 deletions lib/entry-index.js
@@ -64,7 +64,7 @@ function insert (cache, key, integrity, opts) {
)
}).then(
() => fixOwner.chownr(bucket, opts.uid, opts.gid)
-).catch({code: 'ENOENT'}, () => {
+).catch({ code: 'ENOENT' }, () => {
// There's a class of race conditions that happen when things get deleted
// during fixOwner, or between the two mkdirfix/chownr calls.
//
@@ -178,7 +178,7 @@ function lsStream (cache) {
const formatted = formatEntry(cache, entry)
formatted && stream.push(formatted)
}
-}).catch({code: 'ENOENT'}, nop)
+}).catch({ code: 'ENOENT' }, nop)
})
})
}).then(() => {
@@ -282,8 +282,8 @@ function formatEntry (cache, entry) {

function readdirOrEmpty (dir) {
return readdirAsync(dir)
-.catch({code: 'ENOENT'}, () => [])
-.catch({code: 'ENOTDIR'}, () => [])
+.catch({ code: 'ENOENT' }, () => [])
+.catch({ code: 'ENOTDIR' }, () => [])
}

function nop () {
4 changes: 2 additions & 2 deletions lib/util/fix-owner.js
@@ -27,7 +27,7 @@ function fixOwner (filepath, uid, gid) {
filepath,
typeof uid === 'number' ? uid : process.getuid(),
typeof gid === 'number' ? gid : process.getgid()
-).catch({code: 'ENOENT'}, () => null)
+).catch({ code: 'ENOENT' }, () => null)
)
}

@@ -65,7 +65,7 @@ function mkdirfix (p, uid, gid, cb) {
if (made) {
return fixOwner(made, uid, gid).then(() => made)
}
-}).catch({code: 'EEXIST'}, () => {
+}).catch({ code: 'EEXIST' }, () => {
// There's a race in mkdirp!
return fixOwner(p, uid, gid).then(() => null)
})
8 changes: 4 additions & 4 deletions lib/verify.js
@@ -130,7 +130,7 @@ function garbageCollect (cache, opts) {
})
})
}
-}, {concurrency: opts.concurrency}))
+}, { concurrency: opts.concurrency }))
})
})
}
@@ -150,7 +150,7 @@ function verifyContent (filepath, sri) {
contentInfo.valid = false
})
}).then(() => contentInfo)
-}).catch({code: 'ENOENT'}, () => ({size: 0, valid: false}))
+}).catch({ code: 'ENOENT' }, () => ({ size: 0, valid: false }))
}

function rebuildIndex (cache, opts) {
@@ -183,7 +183,7 @@
}
return BB.map(Object.keys(buckets), key => {
return rebuildBucket(cache, buckets[key], stats, opts)
-}, {concurrency: opts.concurrency}).then(() => stats)
+}, { concurrency: opts.concurrency }).then(() => stats)
})
}

@@ -200,7 +200,7 @@ function rebuildBucket (cache, bucket, stats, opts) {
metadata: entry.metadata,
size: entry.size
}).then(() => { stats.totalEntries++ })
-}).catch({code: 'ENOENT'}, () => {
+}).catch({ code: 'ENOENT' }, () => {
stats.rejectedEntries++
stats.missingContent++
})
4 changes: 2 additions & 2 deletions put.js
@@ -28,7 +28,7 @@ function putData (cache, key, data, opts) {
opts = PutOpts(opts)
return write(cache, data, opts).then(res => {
return index.insert(
-cache, key, res.integrity, opts.concat({size: res.size})
+cache, key, res.integrity, opts.concat({ size: res.size })
).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, data, opts)
@@ -63,7 +63,7 @@ function putStream (cache, key, opts) {
})
}, cb => {
contentStream.end(() => {
-index.insert(cache, key, integrity, opts.concat({size})).then(entry => {
+index.insert(cache, key, integrity, opts.concat({ size })).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
}
4 changes: 2 additions & 2 deletions test/benchmarks/index.find.js
@@ -44,8 +44,8 @@ module.exports = (suite, CACHE) => {
},
onStart () {
const fixture = new Tacks(CacheIndex({
-'foo': {key: 'foo'},
-'w/e': {key: 'w/e'}
+'foo': { key: 'foo' },
+'w/e': { key: 'w/e' }
}))
fixture.create(CACHE)
this.fixture = fixture
2 changes: 1 addition & 1 deletion test/benchmarks/index.js
@@ -71,5 +71,5 @@ fs.readdir(__dirname, (err, files) => {
require('./' + f)(suite, path.join(CACHE, path.basename(f, '.js')))
}
})
-suite.run({async: true})
+suite.run({ async: true })
})
16 changes: 8 additions & 8 deletions test/content.read.js
@@ -54,7 +54,7 @@ test('read.stream: returns a stream with cache content data', function (t) {
stream.on('data', function (data) { buf += data })
return BB.join(
finished(stream).then(() => Buffer.from(buf)),
-read(CACHE, INTEGRITY, {size: CONTENT.length}),
+read(CACHE, INTEGRITY, { size: CONTENT.length }),
(fromStream, fromBulk) => {
t.deepEqual(fromStream, CONTENT, 'stream data checks out')
t.deepEqual(fromBulk, CONTENT, 'promise data checks out')
@@ -65,7 +65,7 @@ test('read.stream: returns a stream with cache content data', function (t) {
test('read: allows hashAlgorithm configuration', function (t) {
const CONTENT = Buffer.from('foobarbaz')
const HASH = 'whirlpool'
-const INTEGRITY = ssri.fromData(CONTENT, {algorithms: [HASH]})
+const INTEGRITY = ssri.fromData(CONTENT, { algorithms: [HASH] })
const fixture = new Tacks(CacheContent({
[INTEGRITY]: CONTENT
}))
@@ -93,8 +93,8 @@ test('read: errors if content missing', function (t) {
throw new Error('end was called even though stream errored')
})
return BB.join(
-finished(stream).catch({code: 'ENOENT'}, err => err),
-read(CACHE, 'sha512-whatnot').catch({code: 'ENOENT'}, err => err),
+finished(stream).catch({ code: 'ENOENT' }, err => err),
+read(CACHE, 'sha512-whatnot').catch({ code: 'ENOENT' }, err => err),
(streamErr, bulkErr) => {
t.equal(streamErr.code, 'ENOENT', 'stream got the right error')
t.equal(bulkErr.code, 'ENOENT', 'bulk got the right error')
@@ -114,8 +114,8 @@ test('read: errors if content fails checksum', function (t) {
throw new Error('end was called even though stream errored')
})
return BB.join(
-finished(stream).catch({code: 'EINTEGRITY'}, err => err),
-read(CACHE, INTEGRITY).catch({code: 'EINTEGRITY'}, err => err),
+finished(stream).catch({ code: 'EINTEGRITY' }, err => err),
+read(CACHE, INTEGRITY).catch({ code: 'EINTEGRITY' }, err => err),
(streamErr, bulkErr) => {
t.equal(streamErr.code, 'EINTEGRITY', 'stream got the right error')
t.equal(bulkErr.code, 'EINTEGRITY', 'bulk got the right error')
@@ -135,10 +135,10 @@ test('read: errors if content size does not match size option', function (t) {
throw new Error('end was called even though stream errored')
})
return BB.join(
-finished(stream).catch({code: 'EBADSIZE'}, err => err),
+finished(stream).catch({ code: 'EBADSIZE' }, err => err),
read(CACHE, INTEGRITY, {
size: CONTENT.length
-}).catch({code: 'EBADSIZE'}, err => err),
+}).catch({ code: 'EBADSIZE' }, err => err),
(streamErr, bulkErr) => {
t.equal(streamErr.code, 'EBADSIZE', 'stream got the right error')
t.equal(bulkErr.code, 'EBADSIZE', 'bulk got the right error')
4 changes: 2 additions & 2 deletions test/index.find.js
@@ -43,8 +43,8 @@ test('index.find cache hit', function (t) {

test('index.find cache miss', function (t) {
const fixture = new Tacks(CacheIndex({
-'foo': {key: 'foo'},
-'w/e': {key: 'w/e'}
+'foo': { key: 'foo' },
+'w/e': { key: 'w/e' }
}))
fixture.create(CACHE)
return index.find(
2 changes: 1 addition & 1 deletion test/index.insert.js
@@ -59,7 +59,7 @@ test('inserts additional entries into existing key', function (t) {
return index.insert(CACHE, KEY, INTEGRITY, opts({
metadata: 1
})).then(() => (
-index.insert(CACHE, KEY, INTEGRITY, opts({metadata: 2}))
+index.insert(CACHE, KEY, INTEGRITY, opts({ metadata: 2 }))
)).then(() => {
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
42 changes: 21 additions & 21 deletions test/memoization.js
@@ -70,25 +70,25 @@ test('can clear out the memoization cache', t => {
test('accepts optional injected cache', t => {
memo.clearMemoized()
const MEMO = new Map()
-memo.put(CACHE, ENTRY, DATA, {memoize: MEMO})
+memo.put(CACHE, ENTRY, DATA, { memoize: MEMO })
t.deepEqual(
memo.get(CACHE, ENTRY.key),
null,
'entry not in global memo cache'
)
t.deepEqual(
-memo.get(CACHE, ENTRY.key, {memoize: MEMO}),
-{entry: ENTRY, data: DATA},
+memo.get(CACHE, ENTRY.key, { memoize: MEMO }),
+{ entry: ENTRY, data: DATA },
'entry fetched from injected memoizer'
)
t.deepEqual(
-memo.get.byDigest(CACHE, ENTRY.integrity, {memoize: MEMO}),
+memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: MEMO }),
DATA,
'content entry fetched from injected memoizer'
)
t.deepEqual(
MEMO.get(`key:${CACHE}:${ENTRY.key}`),
-{entry: ENTRY, data: DATA},
+{ entry: ENTRY, data: DATA },
'entry is in the injected memoizer'
)
t.deepEqual(
@@ -98,62 +98,62 @@
)
MEMO.clear()
t.deepEqual(
-memo.get(CACHE, ENTRY.key, {memoize: MEMO}),
+memo.get(CACHE, ENTRY.key, { memoize: MEMO }),
null,
'tried to read from cleared memoizer'
)
t.deepEqual(
-memo.get.byDigest(CACHE, ENTRY.integrity, {memoize: MEMO}),
+memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: MEMO }),
null,
'tried to read by digest from cleared memoizer'
)
-memo.put.byDigest(CACHE, ENTRY.integrity, DATA, {memoize: MEMO})
+memo.put.byDigest(CACHE, ENTRY.integrity, DATA, { memoize: MEMO })
t.deepEqual(
MEMO.get(`digest:${CACHE}:${ENTRY.integrity}`),
DATA,
'content entry is in the injected memoizer'
)
const obj = {}
-memo.put(CACHE, ENTRY, DATA, {memoize: obj})
+memo.put(CACHE, ENTRY, DATA, { memoize: obj })
t.deepEqual(
-memo.get(CACHE, ENTRY.key, {memoize: obj}),
-{entry: ENTRY, data: DATA},
+memo.get(CACHE, ENTRY.key, { memoize: obj }),
+{ entry: ENTRY, data: DATA },
'entry fetched from injected object memoizer'
)
t.deepEqual(
-memo.get.byDigest(CACHE, ENTRY.integrity, {memoize: MEMO}),
+memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: MEMO }),
DATA,
'content entry fetched from injected object memoizer'
)
memo.clearMemoized()
-memo.put(CACHE, ENTRY, DATA, {memoize: 'foo'})
+memo.put(CACHE, ENTRY, DATA, { memoize: 'foo' })
t.deepEqual(
-memo.get(CACHE, ENTRY.key, {memoize: 'foo'}),
-{entry: ENTRY, data: DATA},
+memo.get(CACHE, ENTRY.key, { memoize: 'foo' }),
+{ entry: ENTRY, data: DATA },
'entry fetched from global memoization obj on non-obj option'
)
t.deepEqual(
-memo.get(CACHE, ENTRY.key, {memoize: 'foo'}),
-{entry: ENTRY, data: DATA},
+memo.get(CACHE, ENTRY.key, { memoize: 'foo' }),
+{ entry: ENTRY, data: DATA },
'entry fetched from global memoization obj on non-obj option'
)
t.deepEqual(
-memo.get.byDigest(CACHE, ENTRY.integrity, {memoize: 'foo'}),
+memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: 'foo' }),
DATA,
'content entry fetched global memoizer obj on non-obj option'
)
t.deepEqual(
-memo.get.byDigest(CACHE, ENTRY.integrity, {memoize: 'foo'}),
+memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: 'foo' }),
DATA,
'content entry fetched global memoizer obj on non-obj option'
)
t.deepEqual(
-memo.get.byDigest(CACHE, ENTRY.integrity, {memoize: false}),
+memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: false }),
DATA,
'content entry fetched global memoizer obj on non-obj option'
)
t.deepEqual(
-memo.get.byDigest(CACHE, ENTRY.integrity, {memoize: false}),
+memo.get.byDigest(CACHE, ENTRY.integrity, { memoize: false }),
DATA,
'content entry fetched global memoizer obj on non-obj option'
)
2 changes: 1 addition & 1 deletion test/put.js
@@ -46,7 +46,7 @@ test('basic stream insertion', t => {
})

test('adds correct entry to index before finishing', t => {
-return put(CACHE, KEY, CONTENT, {metadata: METADATA}).then(() => {
+return put(CACHE, KEY, CONTENT, { metadata: METADATA }).then(() => {
return index.find(CACHE, KEY)
}).then(entry => {
t.ok(entry, 'got an entry')
