Skip to content

Commit

Permalink
feat: do not alter file ownership (#216)
Browse files Browse the repository at this point in the history
BREAKING CHANGE: this package no longer attempts to change file ownership automatically
  • Loading branch information
nlf committed Oct 13, 2022
1 parent 2ac3980 commit 43ae022
Show file tree
Hide file tree
Showing 4 changed files with 78 additions and 150 deletions.
62 changes: 13 additions & 49 deletions lib/fetcher.js
Expand Up @@ -7,10 +7,10 @@ const npa = require('npm-package-arg')
const ssri = require('ssri')
const { promisify } = require('util')
const { basename, dirname } = require('path')
const rimraf = promisify(require('rimraf'))
const tar = require('tar')
const log = require('proc-log')
const retry = require('promise-retry')
const fs = require('fs/promises')
const fsm = require('fs-minipass')
const cacache = require('cacache')
const isPackageBin = require('./util/is-package-bin.js')
Expand All @@ -20,20 +20,11 @@ const readPackageJsonFast = require('read-package-json-fast')
const readPackageJson = promisify(require('read-package-json'))
const Minipass = require('minipass')

// we only change ownership on unix platforms, and only if uid is 0
const selfOwner = process.getuid && process.getuid() === 0 ? {
uid: 0,
gid: process.getgid(),
} : null
const chownr = selfOwner ? promisify(require('chownr')) : null
const inferOwner = selfOwner ? require('infer-owner') : null
const mkdirp = require('mkdirp')
const cacheDir = require('./util/cache-dir.js')

// Private methods.
// Child classes should not have to override these.
// Users should never call them.
const _chown = Symbol('_chown')
const _extract = Symbol('_extract')
const _mkdir = Symbol('_mkdir')
const _empty = Symbol('_empty')
Expand Down Expand Up @@ -359,44 +350,21 @@ class FetcherBase {
return cacache.rm.content(this.cache, this.integrity, this.opts)
}

async [_chown] (path, uid, gid) {
return selfOwner && (selfOwner.gid !== gid || selfOwner.uid !== uid)
? chownr(path, uid, gid)
: /* istanbul ignore next - we don't test in root-owned folders */ null
}

[_empty] (path) {
return getContents({ path, depth: 1 }).then(contents => Promise.all(
contents.map(entry => rimraf(entry))))
contents.map(entry => fs.rm(entry, { recursive: true, force: true }))))
}

[_mkdir] (dest) {
// if we're bothering to do owner inference, then do it.
// otherwise just make the dir, and return an empty object.
// always empty the dir to start with, but do so
// _after_ inferring the owner, in case there's an existing folder
// there that we would want to preserve which differs from the
// parent folder (rare, but probably happens sometimes).
return !inferOwner
? this[_empty](dest).then(() => mkdirp(dest)).then(() => ({}))
: inferOwner(dest).then(({ uid, gid }) =>
this[_empty](dest)
.then(() => mkdirp(dest))
.then(made => {
// ignore the || dest part in coverage. It's there to handle
// race conditions where the dir may be made by someone else
// after being removed by us.
const dir = made || /* istanbul ignore next */ dest
return this[_chown](dir, uid, gid)
})
.then(() => ({ uid, gid })))
async [_mkdir] (dest) {
await this[_empty](dest)
return await fs.mkdir(dest, { recursive: true })
}

// extraction is always the same. the only difference is where
// the tarball comes from.
extract (dest) {
return this[_mkdir](dest).then(({ uid, gid }) =>
this.tarballStream(tarball => this[_extract](dest, tarball, uid, gid)))
async extract (dest) {
await this[_mkdir](dest)
return this.tarballStream((tarball) => this[_extract](dest, tarball))
}

[_toFile] (dest) {
Expand All @@ -414,18 +382,14 @@ class FetcherBase {
}

// don't use this[_mkdir] because we don't want to rimraf anything
tarballFile (dest) {
async tarballFile (dest) {
const dir = dirname(dest)
return !inferOwner
? mkdirp(dir).then(() => this[_toFile](dest))
: inferOwner(dest).then(({ uid, gid }) =>
mkdirp(dir).then(made => this[_toFile](dest)
.then(res => this[_chown](made || dir, uid, gid)
.then(() => res))))
await fs.mkdir(dir, { recursive: true })
return this[_toFile](dest)
}

[_extract] (dest, tarball, uid, gid) {
const extractor = tar.x(this[_tarxOptions]({ cwd: dest, uid, gid }))
[_extract] (dest, tarball) {
const extractor = tar.x(this[_tarxOptions]({ cwd: dest }))
const p = new Promise((resolve, reject) => {
extractor.on('end', () => {
resolve({
Expand Down
6 changes: 1 addition & 5 deletions package.json
Expand Up @@ -48,12 +48,9 @@
"@npmcli/installed-package-contents": "^1.0.7",
"@npmcli/promise-spawn": "^3.0.0",
"@npmcli/run-script": "^4.1.0",
"cacache": "^16.0.0",
"chownr": "^2.0.0",
"cacache": "^17.0.0",
"fs-minipass": "^2.1.0",
"infer-owner": "^1.0.4",
"minipass": "^3.1.6",
"mkdirp": "^1.0.4",
"npm-package-arg": "^9.0.0",
"npm-packlist": "^7.0.0",
"npm-pick-manifest": "^7.0.0",
Expand All @@ -62,7 +59,6 @@
"promise-retry": "^2.0.1",
"read-package-json": "^5.0.0",
"read-package-json-fast": "^3.0.0",
"rimraf": "^3.0.2",
"ssri": "^9.0.0",
"tar": "^6.1.11"
},
Expand Down
136 changes: 52 additions & 84 deletions test/fetcher.js
@@ -1,16 +1,4 @@
const fakeSudo = process.argv[2] === 'fake-sudo'
const fs = require('fs')
process.chownLog = []
if (fakeSudo) {
process.realUid = process.getuid()
process.getuid = () => 0
const fakeChown = type => (path, uid, gid, cb) => {
process.chownLog.push({ type, path, uid, gid })
process.nextTick(cb)
}
fs.chown = fakeChown('chown')
fs.lchown = fakeChown('lchown')
}

fs.utimes = () => {
throw new Error('do not call utimes')
Expand Down Expand Up @@ -106,10 +94,6 @@ t.test('tarball data', async t => {

t.test('tarballFile', t => {
const target = resolve(me, 'tarball-file')
if (fakeSudo) {
process.chownLog.length = 0
}

t.test('basic copy', t =>
new FileFetcher(abbrevspec, { cache })
.tarballFile(target + '/basic/1.tgz'))
Expand Down Expand Up @@ -198,10 +182,6 @@ t.test('extract', t => {
})
}

if (fakeSudo) {
process.chownLog.length = 0
}

return new FileFetcher(abbrevspec, { cache }).extract(target + '/uncached')
.then(check('uncached'))
.then(() => new FileFetcher(abbrevspec, {
Expand All @@ -210,12 +190,6 @@ t.test('extract', t => {
resolved: abbrev,
}).extract(target + '/cached'))
.then(check('cached'))
.then(!fakeSudo ? () => {} : () => {
t.not(process.chownLog.length, 0, 'did some chowns')
const log = { uid: process.realUid, gid: process.getgid() }
process.chownLog.forEach(entry => t.match(entry, log, 'chowned to me'))
process.chownLog.length = 0
})
.then(() => {
// test a bad cache entry
cacache.get.stream.byDigest = () => {
Expand Down Expand Up @@ -413,7 +387,6 @@ t.test('a non-retriable cache error', t => {
const poop = new Error('poop')
poop.code = 'LE_POOP'
t.teardown(mutateFS.fail('read', poop))
t.teardown(() => process.chownLog.length = 0)
return cacache.put(cache, 'any-old-key', data, {
integrity: abbrevMani._integrity,
}).then(() => t.rejects(new FileFetcher(abbrev, {
Expand All @@ -422,70 +395,65 @@ t.test('a non-retriable cache error', t => {
}).manifest(), poop))
})

// no need to do some of these basic tests in sudo mode
if (!fakeSudo) {
t.spawn(process.execPath, [__filename, 'fake-sudo'], 'fake sudo mode')
t.test('before implies full metadata', t => {
const f = new Fetcher('foo', { before: new Date('1979-07-01') })
t.equal(f.fullMetadata, true)
t.end()
})

t.test('before implies full metadata', t => {
const f = new Fetcher('foo', { before: new Date('1979-07-01') })
t.equal(f.fullMetadata, true)
t.end()
t.test('various projectiles', t => {
t.throws(() => new Fetcher(), { message: 'options object is required' })
const f = new Fetcher('foo', {})
// base class doesn't implement functionality
const expect = {
message: 'not implemented in this fetcher type: FetcherBase',
}
t.rejects(f.resolve(), expect)
f.resolved = 'fooblz'
t.resolveMatch(f.resolve(), 'fooblz', 'has resolved')
t.rejects(f.extract('target'), expect)
t.rejects(f.manifest(), expect)
t.rejects(f.packument(), expect)
t.rejects(f.tarball(), expect)
const foo = npa('foo@bar')
foo.type = 'blerg'
t.throws(() => Fetcher.get(foo, {}), {
message: 'Unknown spec type: blerg',
})

t.test('various projectiles', t => {
t.throws(() => new Fetcher(), { message: 'options object is required' })
const f = new Fetcher('foo', {})
// base class doesn't implement functionality
const expect = {
message: 'not implemented in this fetcher type: FetcherBase',
}
t.rejects(f.resolve(), expect)
f.resolved = 'fooblz'
t.resolveMatch(f.resolve(), 'fooblz', 'has resolved')
t.rejects(f.extract('target'), expect)
t.rejects(f.manifest(), expect)
t.rejects(f.packument(), expect)
t.rejects(f.tarball(), expect)
const foo = npa('foo@bar')
foo.type = 'blerg'
t.throws(() => Fetcher.get(foo, {}), {
message: 'Unknown spec type: blerg',
})

class KidFetcher extends Fetcher {
get types () {
return ['kid']
}
class KidFetcher extends Fetcher {
get types () {
return ['kid']
}
t.throws(() => new KidFetcher('foo', {}), {
message: `Wrong spec type (tag) for KidFetcher. Supported types: kid`,
})
t.end()
}
t.throws(() => new KidFetcher('foo', {}), {
message: `Wrong spec type (tag) for KidFetcher. Supported types: kid`,
})
t.end()
})

t.test('fetcher.get', t => {
const specToType = {
foo: 'RegistryFetcher',
'foo@bar': 'RegistryFetcher',
'foo@1.2': 'RegistryFetcher',
'foo@1.2.3': 'RegistryFetcher',
'npm:foo@2': 'RegistryFetcher',
'@foo/bar': 'RegistryFetcher',
'@foo/bar@1.2': 'RegistryFetcher',
'@foo/bar@1.2.3': 'RegistryFetcher',
'foo.tgz': 'FileFetcher',
'/path/to/foo': 'DirFetcher',
'isaacs/foo': 'GitFetcher',
'git+https://github.com/isaacs/foo': 'GitFetcher',
'https://server.com/foo.tgz': 'RemoteFetcher',
}
for (const [spec, type] of Object.entries(specToType)) {
t.equal(Fetcher.get(spec).type, type)
}
t.test('fetcher.get', t => {
const specToType = {
foo: 'RegistryFetcher',
'foo@bar': 'RegistryFetcher',
'foo@1.2': 'RegistryFetcher',
'foo@1.2.3': 'RegistryFetcher',
'npm:foo@2': 'RegistryFetcher',
'@foo/bar': 'RegistryFetcher',
'@foo/bar@1.2': 'RegistryFetcher',
'@foo/bar@1.2.3': 'RegistryFetcher',
'foo.tgz': 'FileFetcher',
'/path/to/foo': 'DirFetcher',
'isaacs/foo': 'GitFetcher',
'git+https://github.com/isaacs/foo': 'GitFetcher',
'https://server.com/foo.tgz': 'RemoteFetcher',
}
for (const [spec, type] of Object.entries(specToType)) {
t.equal(Fetcher.get(spec).type, type)
}

t.end()
})
}
t.end()
})

t.test('make bins executable', async t => {
const file = resolve(__dirname, 'fixtures/bin-object.tgz')
Expand Down
24 changes: 12 additions & 12 deletions test/git.js
Expand Up @@ -61,7 +61,6 @@ const fs = require('fs')
const http = require('http')

const { dirname, basename, resolve } = require('path')
const rimraf = require('rimraf')
const fixtures = resolve(__dirname, 'fixtures')
const abbrev = resolve(fixtures, 'abbrev-1.1.1.tgz')
const prepIgnore = resolve(fixtures, 'prepare-requires-gitignore-1.2.3.tgz')
Expand All @@ -84,7 +83,8 @@ const spawnGit = require('@npmcli/git').spawn
const { spawn } = require('child_process')
const spawnNpm = require('../lib/util/npm.js')

const mkdirp = require('mkdirp')
const { mkdir } = require('fs/promises')
const { rmSync } = require('fs')

const tar = require('tar')

Expand All @@ -99,8 +99,8 @@ t.test('setup', { bail: true }, t => {
'--no-sign-git-tag',
], repo)

mkdirp.sync(repo)
return git('init')
return mkdir(repo, { recursive: true })
.then(() => git('init'))
.then(() => git('config', 'user.name', 'pacotedev'))
.then(() => git('config', 'user.email', 'i+pacotedev@izs.me'))
.then(() => git('config', 'tag.gpgSign', 'false'))
Expand Down Expand Up @@ -180,7 +180,7 @@ t.test('setup', { bail: true }, t => {
const name = basename(repoDir)
const otherName = basename(other)

mkdirp.sync(repoDir)
await mkdir(repoDir, { recursive: true })
await git('init')
await git('config', 'user.name', 'pacotedev')
await git('config', 'user.email', 'i+pacotedev@izs.me')
Expand Down Expand Up @@ -227,15 +227,15 @@ t.test('setup', { bail: true }, t => {
}
daemon.stderr.on('data', onDaemonData)
// only clean up the dir once the daemon is banished
daemon.on('close', () => rimraf.sync(me))
daemon.on('close', () => rmSync(me, { recursive: true, force: true }))
})

t.test('create a repo with a submodule', t => {
const submoduleRepo = resolve(me, 'submodule-repo')
const git = (...cmd) => spawnGit(cmd, { cwd: submoduleRepo })
const write = (f, c) => fs.writeFileSync(`${submoduleRepo}/${f}`, c)
mkdirp.sync(submoduleRepo)
return git('init')
return mkdir(submoduleRepo, { recursive: true })
.then(() => git('init'))
.then(() => git('config', 'user.name', 'pacotedev'))
.then(() => git('config', 'user.email', 'i+pacotedev@izs.me'))
.then(() => git('config', 'tag.gpgSign', 'false'))
Expand Down Expand Up @@ -293,8 +293,8 @@ t.test('setup', { bail: true }, t => {
const wsfolder = resolve(me, 'workspaces-repo/a')
const git = (...cmd) => spawnGit(cmd, { cwd: workspacesRepo })
const write = (f, c) => fs.writeFileSync(`${workspacesRepo}/${f}`, c)
mkdirp.sync(wsfolder)
return git('init')
return mkdir(wsfolder, { recursive: true })
.then(() => git('init'))
.then(() => git('config', 'user.name', 'pacotedev'))
.then(() => git('config', 'user.email', 'i+pacotedev@github.com'))
.then(() => git('config', 'tag.gpgSign', 'false'))
Expand Down Expand Up @@ -322,8 +322,8 @@ t.test('setup', { bail: true }, t => {
const prepackRepo = resolve(me, 'prepack-repo')
const git = (...cmd) => spawnGit(cmd, { cwd: prepackRepo })
const write = (f, c) => fs.writeFileSync(`${prepackRepo}/${f}`, c)
mkdirp.sync(prepackRepo)
return git('init')
return mkdir(prepackRepo, { recursive: true })
.then(() => git('init'))
.then(() => git('config', 'user.name', 'pacotedev'))
.then(() => git('config', 'user.email', 'i+pacotedev@github.com'))
.then(() => git('config', 'tag.gpgSign', 'false'))
Expand Down

0 comments on commit 43ae022

Please sign in to comment.