diff --git a/lib/verify.js b/lib/verify.js
index 33f566c..729ae98 100644
--- a/lib/verify.js
+++ b/lib/verify.js
@@ -100,7 +100,11 @@ async function garbageCollect (cache, opts) {
       return
     }
 
-    liveContent.add(entry.integrity.toString())
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
   })
   await new Promise((resolve, reject) => {
     indexStream.on('end', resolve).on('error', reject)
diff --git a/test/verify.js b/test/verify.js
index d0a64de..37cfb46 100644
--- a/test/verify.js
+++ b/test/verify.js
@@ -14,7 +14,8 @@ const KEY = 'my-test-key'
 const INTEGRITY = ssri.fromData(CONTENT)
 const METADATA = { foo: 'bar' }
 
-const verify = require('..').verify
+const cacache = require('..')
+const verify = cacache.verify
 
 // defines reusable errors
 const genericError = new Error('ERR')
@@ -385,3 +386,17 @@ t.test('hash collisions excluded', async t => {
     'should resolve while also excluding filtered out entries'
   )
 })
+
+t.test('handles multiple hashes of the same content', async t => {
+  const cache = t.testdir()
+  let integrity
+  // anything other than the default (currently sha512)
+  await cacache.put.stream(cache, 'test', { algorithms: ['sha256'] }).on('integrity', i => {
+    integrity = i
+  }).end('CONTENT!').promise()
+  await cacache.put.stream(cache, 'test', { integrity }).end('CONTENT!').promise()
+  await cacache.verify(cache)
+  const ls = await cacache.ls(cache)
+  t.match(ls.test.integrity, 'sha512')
+  t.match(ls.test.integrity, 'sha256')
+})