Commit

fix: linting (#121)
wraithgar committed Aug 15, 2022
1 parent e8d1e85 commit a683cff
Showing 18 changed files with 1,050 additions and 1,403 deletions.
2 changes: 1 addition & 1 deletion lib/content/read.js
@@ -81,7 +81,7 @@ function readStream (cache, integrity, opts = {}) {
return stream.emit('error', sizeError(size, stat.size))
}

readPipeline(cpath, stat.size, sri, stream)
return readPipeline(cpath, stat.size, sri, stream)
}).catch(err => stream.emit('error', err))

return stream
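
Note on the change above: the linting config here appears to use eslint-plugin-promise (the promise/* rule names visible in the lib/content/write.js hunk below), and its promise/always-return rule requires every .then() handler to return a value or throw. Returning readPipeline(...) satisfies the rule without changing behavior, since nothing consumes the resolved value. A minimal illustrative sketch, not code from this commit:

const fetchValue = () => Promise.resolve(42)

fetchValue()
  .then((n) => {
    return n * 2 // returning the last expression satisfies promise/always-return
  })
  .catch((err) => console.error(err))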
2 changes: 2 additions & 0 deletions lib/content/write.js
@@ -80,9 +80,11 @@ class CacacheWriteStream extends Flush {
// defer this one tick by rejecting a promise on it.
return Promise.reject(e).catch(cb)
}
// eslint-disable-next-line promise/catch-or-return
this.handleContentP.then(
(res) => {
res.integrity && this.emit('integrity', res.integrity)
// eslint-disable-next-line promise/always-return
res.size !== null && this.emit('size', res.size)
cb()
},
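
The two comments added above are inline suppressions: an eslint-disable-next-line directive turns the named rule off for exactly the following line, which keeps this intentional fire-and-forget promise handling without relaxing the rules file-wide. A generic sketch of the mechanism, not from this diff:

const task = Promise.resolve('ok')

// eslint-disable-next-line promise/catch-or-return
task.then((value) => {
  // eslint-disable-next-line promise/always-return
  console.log(value)
})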
1 change: 1 addition & 0 deletions lib/entry-index.js
@@ -285,6 +285,7 @@ function lsStream (cache) {
}))
}))
stream.end()
return stream
}).catch(err => stream.emit('error', err))

return stream
1 change: 1 addition & 0 deletions lib/get.js
@@ -155,6 +155,7 @@ function getStream (cache, key, opts = {}) {
stream.unshift(memoStream)
}
stream.unshift(src)
return stream
}).catch((err) => stream.emit('error', err))

return stream
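
Both lsStream and getStream follow the same shape: build the stream synchronously, hand it back to the caller immediately, feed it from a promise chain, and surface failures with stream.emit('error', err); the added return stream lines exist only to keep promise/always-return happy. A simplified sketch of that shape using a core PassThrough stream (assumed names, not the real implementation):

const { PassThrough } = require('stream')

function lazyStream (loadData) {
  const stream = new PassThrough()
  loadData()
    .then((data) => {
      stream.end(data) // push the data once it is available
      return stream // value is unused; satisfies promise/always-return
    })
    .catch((err) => stream.emit('error', err))
  return stream // the caller gets the stream before the data arrives
}

// e.g. lazyStream(() => Promise.resolve('hello')).pipe(process.stdout)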
168 changes: 65 additions & 103 deletions test/content/read.js
@@ -28,17 +28,16 @@ const getReadStatFailure = (t, err) => getRead(t, {
}),
})

t.test('read: returns a Promise with cache content data', function (t) {
t.test('read: returns a Promise with cache content data', async t => {
const CONTENT = Buffer.from('foobarbaz')
const INTEGRITY = ssri.fromData(CONTENT)
const CACHE = t.testdir(
CacheContent({
[INTEGRITY]: CONTENT,
})
)
return read(CACHE, INTEGRITY).then((data) => {
t.same(data, CONTENT, 'cache contents read correctly')
})
const data = await read(CACHE, INTEGRITY)
t.same(data, CONTENT, 'cache contents read correctly')
})
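
Throughout this test file the conversion is from .then() chains to async test bodies: tap ends an async test when its returned promise settles, and t.testdir() builds a throwaway fixture directory and returns its path (which is how CACHE is created here). A standalone sketch of those two idioms, not code from this commit:

const t = require('tap')
const fs = require('fs/promises')
const path = require('path')

t.test('reads a fixture file', async t => {
  const dir = t.testdir({ 'hello.txt': 'world' }) // temp dir, cleaned up by tap
  const data = await fs.readFile(path.join(dir, 'hello.txt'), 'utf8')
  t.equal(data, 'world', 'fixture contents read correctly')
})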

t.test('read.sync: reads synchronously', (t) => {
@@ -54,7 +53,7 @@ t.test('read.sync: reads synchronously', (t) => {
t.end()
})

t.test('read.stream: returns a stream with cache content data', function (t) {
t.test('read.stream: returns a stream with cache content data', async t => {
const CONTENT = Buffer.from('foobarbaz')
const INTEGRITY = ssri.fromData(CONTENT)
const CACHE = t.testdir(
@@ -63,16 +62,15 @@ t.test('read.stream: returns a stream with cache content data', function (t) {
})
)
const stream = read.stream(CACHE, INTEGRITY)
return Promise.all([
const [fromStream, fromBulk] = await Promise.all([
stream.concat(),
read(CACHE, INTEGRITY, { size: CONTENT.length }),
]).then(([fromStream, fromBulk]) => {
t.same(fromStream, CONTENT, 'stream data checks out')
t.same(fromBulk, CONTENT, 'promise data checks out')
})
])
t.same(fromStream, CONTENT, 'stream data checks out')
t.same(fromBulk, CONTENT, 'promise data checks out')
})
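
read.stream() hands back a minipass-based stream, and minipass's .concat() resolves with the stream's full contents once it ends, which is what lets the test await the stream alongside the bulk read. A small sketch, assuming minipass's v3-style default export (not code from this repo):

const Minipass = require('minipass') // assumption: v3-style default export

const stream = new Minipass()
stream.end(Buffer.from('foobarbaz'))
stream.concat().then((data) => {
  return console.log(data.toString()) // 'foobarbaz'
})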

t.test('read: allows hashAlgorithm configuration', function (t) {
t.test('read: allows hashAlgorithm configuration', async t => {
const CONTENT = Buffer.from('foobarbaz')
const HASH = 'sha384'
const INTEGRITY = ssri.fromData(CONTENT, { algorithms: [HASH] })
@@ -82,16 +80,15 @@ t.test('read: allows hashAlgorithm configuration', function (t) {
})
)
const stream = read.stream(CACHE, INTEGRITY)
return Promise.all([
const [fromStream, fromBulk] = await Promise.all([
stream.concat(),
read(CACHE, INTEGRITY),
]).then(([fromStream, fromBulk]) => {
t.same(fromStream, CONTENT, 'stream used algorithm')
t.same(fromBulk, CONTENT, 'promise used algorithm')
})
])
t.same(fromStream, CONTENT, 'stream used algorithm')
t.same(fromBulk, CONTENT, 'promise used algorithm')
})
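
The addresses used throughout these tests come from ssri: fromData() hashes a buffer with the requested algorithm(s) and returns an Integrity object whose string form is what cacache stores content under. A standalone sketch:

const ssri = require('ssri')

const integrity = ssri.fromData(Buffer.from('foobarbaz'), { algorithms: ['sha384'] })
console.log(integrity.toString()) // something like 'sha384-<base64 digest>'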

t.test('read: errors if content missing', function (t) {
t.test('read: errors if content missing', async t => {
const CACHE = t.testdir({})
const stream = read.stream(CACHE, 'sha512-whatnot')
stream.on('data', function (data) {
@@ -100,28 +97,19 @@ t.test('read: errors if content missing', function (t) {
stream.on('end', function () {
throw new Error('end was emitted even though stream errored')
})
return Promise.all([
stream.promise().catch((err) => {
if (err.code === 'ENOENT') {
return err
}

throw err
}),
read(CACHE, 'sha512-whatnot').catch((err) => {
if (err.code === 'ENOENT') {
return err
}

throw err
}),
]).then(([streamErr, bulkErr]) => {
t.match(streamErr, { code: 'ENOENT' }, 'stream got the right error')
t.match(bulkErr, { code: 'ENOENT' }, 'bulk got the right error')
})
await t.rejects(
stream.promise(),
{ code: 'ENOENT' },
'stream got the right error'
)
await t.rejects(
read(CACHE, 'sha512-whatnot'),
{ code: 'ENOENT' },
'bulk got the right error'
)
})
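
The rewritten error tests replace hand-rolled catch-and-compare blocks with tap's t.rejects(), which awaits the promise and passes only if it rejects with an error matching the given pattern. A generic sketch, not from this diff:

const t = require('tap')
const fs = require('fs/promises')

t.test('missing file rejects with ENOENT', async t => {
  await t.rejects(
    fs.readFile('/no/such/file'),
    { code: 'ENOENT' },
    'got the expected error code'
  )
})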

t.test('read: errors if content fails checksum', function (t) {
t.test('read: errors if content fails checksum', async t => {
const CONTENT = Buffer.from('foobarbaz')
const INTEGRITY = ssri.fromData(CONTENT)
const CACHE = t.testdir(
@@ -133,28 +121,19 @@ t.test('read: errors if content fails checksum', function (t) {
stream.on('end', function () {
throw new Error('end was emitted even though stream errored')
})
return Promise.all([
stream.promise().catch((err) => {
if (err.code === 'EINTEGRITY') {
return err
}

throw err
}),
read(CACHE, INTEGRITY).catch((err) => {
if (err.code === 'EINTEGRITY') {
return err
}

throw err
}),
]).then(([streamErr, bulkErr]) => {
t.match(streamErr, { code: 'EINTEGRITY' }, 'stream got the right error')
t.match(bulkErr, { code: 'EINTEGRITY' }, 'bulk got the right error')
})
await t.rejects(
stream.promise(),
{ code: 'EINTEGRITY' },
'stream got the right error'
)
await t.rejects(
read(CACHE, INTEGRITY),
{ code: 'EINTEGRITY' },
'bulk got the right error'
)
})

t.test('read: errors if content size does not match size option', function (t) {
t.test('read: errors if content size does not match size option', async t => {
const CONTENT = Buffer.from('foobarbaz')
const INTEGRITY = ssri.fromData(CONTENT)
const CACHE = t.testdir(
@@ -166,27 +145,16 @@ t.test('read: errors if content size does not match size option', function (t) {
stream.on('end', function () {
throw new Error('end was called even though stream errored')
})
return Promise.all([
stream.promise().catch((err) => {
if (err.code === 'EBADSIZE') {
return err
}

throw err
}),
read(CACHE, INTEGRITY, {
size: CONTENT.length,
}).catch((err) => {
if (err.code === 'EBADSIZE') {
return err
}

throw err
}),
]).then(([streamErr, bulkErr]) => {
t.match(streamErr, { code: 'EBADSIZE' }, 'stream got the right error')
t.match(bulkErr, { code: 'EBADSIZE' }, 'bulk got the right error')
})
await t.rejects(
stream.promise(),
{ code: 'EBADSIZE' },
'stream got the right error'
)
await t.rejects(
read(CACHE, INTEGRITY, { size: CONTENT.length }),
{ code: 'EBADSIZE' },
'bulk got the right error'
)
})

t.test('read: error while parsing provided integrity data', function (t) {
@@ -344,27 +312,26 @@ t.test('read.sync: content size value does not match option', (t) => {
t.end()
})

t.test('hasContent: tests content existence', (t) => {
t.test('hasContent: tests content existence', async t => {
const CACHE = t.testdir(
CacheContent({
'sha1-deadbeef': '',
})
)
return Promise.all([
read.hasContent(CACHE, 'sha1-deadbeef').then((content) => {
t.ok(content.sri, 'returned sri for this content')
t.equal(content.size, 0, 'returned the right size for this content')
t.ok(content.stat.isFile(), 'returned actual stat object')
}),
read.hasContent(CACHE, 'sha1-not-there').then((content) => {
t.equal(content, false, 'returned false for missing content')
}),
read
.hasContent(CACHE, 'sha1-not-here sha1-also-not-here')
.then((content) => {
t.equal(content, false, 'multi-content hash failures work ok')
}),
])
const content = await read.hasContent(CACHE, 'sha1-deadbeef')
t.ok(content.sri, 'returned sri for this content')
t.equal(content.size, 0, 'returned the right size for this content')
t.ok(content.stat.isFile(), 'returned actual stat object')
await t.resolveMatch(
read.hasContent(CACHE, 'sha1-not-there'),
false,
'returned false for missing content'
)
await t.resolveMatch(
read.hasContent(CACHE, 'sha1-not-here sha1-also-not-here'),
false,
'multi-content hash failures work ok'
)
})
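
t.resolveMatch() is the mirror image of t.rejects(): it awaits the promise and runs a match against the resolved value, which collapses each hasContent expectation above into a single assertion. A generic sketch:

const t = require('tap')

t.test('resolved value matches the expected shape', async t => {
  await t.resolveMatch(
    Promise.resolve({ ok: true, count: 3 }),
    { ok: true },
    'resolved with the expected fields'
  )
})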

t.test('hasContent: permission error', (t) => {
Expand Down Expand Up @@ -457,7 +424,7 @@ t.test('hasContent.sync: no integrity provided', (t) => {
t.end()
})

t.test('copy: copies content to a destination path', (t) => {
t.test('copy: copies content to a destination path', async t => {
const CONTENT = Buffer.from('foobarbaz')
const INTEGRITY = ssri.fromData(CONTENT)
const CACHE = t.testdir(
@@ -466,14 +433,9 @@ t.test('copy: copies content to a destination path', (t) => {
})
)
const DEST = path.join(CACHE, 'foobar-file')
return read
.copy(CACHE, INTEGRITY, DEST)
.then(() => {
return fs.readFile(DEST)
})
.then((data) => {
t.same(data, CONTENT, 'file successfully copied')
})
await read.copy(CACHE, INTEGRITY, DEST)
const data = await fs.readFile(DEST)
t.same(data, CONTENT, 'file successfully copied')
})
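
With async/await the copy test becomes two awaits: copy the content file out by its integrity, then read the destination back. A sketch of that flow as standalone code, assuming this repo's lib/content/read module and an already-populated cache:

const read = require('./lib/content/read') // assumption: run from the repo root
const fs = require('fs/promises')

async function copyOut (cache, integrity, dest) {
  await read.copy(cache, integrity, dest) // copy the content file to dest
  return fs.readFile(dest) // read it back, e.g. to verify the bytes
}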

t.test('copy.sync: copies content to a destination path synchronously', (t) => {
33 changes: 14 additions & 19 deletions test/content/write.chownr.js
@@ -12,7 +12,7 @@ const t = require('tap')

const contentPath = require('../../lib/content/path')

t.test('infers ownership from cache folder owner', (t) => {
t.test('infers ownership from cache folder owner', async t => {
const CACHE = t.testdir({ cache: {} })
const CONTENT = 'foobarbaz'
const INTEGRITY = ssri.fromData(CONTENT)
@@ -32,21 +32,16 @@ t.test('infers ownership from cache folder owner', (t) => {
},
})
t.plan(7)
return write.stream(CACHE, { hashAlgorithm: 'sha1' })
.end(CONTENT)
.promise()
.then(() => {
const cpath = contentPath(CACHE, INTEGRITY)
const expectedPaths = [
path.join(CACHE, path.relative(CACHE, cpath).split(path.sep)[0]),
cpath,
path.join(CACHE, 'tmp'),
]
t.same(
updatedPaths.sort(),
expectedPaths,
'all paths that needed user stuff set got set'
)
})
}
)
await write.stream(CACHE, { hashAlgorithm: 'sha1' }).end(CONTENT).promise()
const cpath = contentPath(CACHE, INTEGRITY)
const expectedPaths = [
path.join(CACHE, path.relative(CACHE, cpath).split(path.sep)[0]),
cpath,
path.join(CACHE, 'tmp'),
]
t.same(
updatedPaths.sort(),
expectedPaths,
'all paths that needed user stuff set got set'
)
})
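
The await on write.stream(...).end(CONTENT).promise() works because the write stream is minipass-based: .end() returns the stream itself, and .promise() resolves once the stream has ended (or rejects on 'error'). A generic sketch of that idiom, assuming minipass's v3-style default export rather than the real write stream:

const Minipass = require('minipass') // assumption: v3-style default export

async function writeAndWait (content) {
  const stream = new Minipass()
  const done = stream.promise() // resolves on 'end', rejects on 'error'
  stream.end(content) // .end() returns the stream, so calls can be chained
  stream.resume() // drain the buffered data so 'end' can fire
  return done
}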
