From 43ae02203f04fd597a6d73f0a8bb4e639986e62e Mon Sep 17 00:00:00 2001 From: nlf Date: Thu, 13 Oct 2022 12:12:40 -0700 Subject: [PATCH] feat: do not alter file ownership (#216) BREAKING CHANGE: this package no longer attempts to change file ownership automatically --- lib/fetcher.js | 62 +++++----------------- package.json | 6 +-- test/fetcher.js | 136 ++++++++++++++++++------------------------------ test/git.js | 24 ++++----- 4 files changed, 78 insertions(+), 150 deletions(-) diff --git a/lib/fetcher.js b/lib/fetcher.js index 95b6d3ee..fe5679f0 100644 --- a/lib/fetcher.js +++ b/lib/fetcher.js @@ -7,10 +7,10 @@ const npa = require('npm-package-arg') const ssri = require('ssri') const { promisify } = require('util') const { basename, dirname } = require('path') -const rimraf = promisify(require('rimraf')) const tar = require('tar') const log = require('proc-log') const retry = require('promise-retry') +const fs = require('fs/promises') const fsm = require('fs-minipass') const cacache = require('cacache') const isPackageBin = require('./util/is-package-bin.js') @@ -20,20 +20,11 @@ const readPackageJsonFast = require('read-package-json-fast') const readPackageJson = promisify(require('read-package-json')) const Minipass = require('minipass') -// we only change ownership on unix platforms, and only if uid is 0 -const selfOwner = process.getuid && process.getuid() === 0 ? { - uid: 0, - gid: process.getgid(), -} : null -const chownr = selfOwner ? promisify(require('chownr')) : null -const inferOwner = selfOwner ? require('infer-owner') : null -const mkdirp = require('mkdirp') const cacheDir = require('./util/cache-dir.js') // Private methods. // Child classes should not have to override these. // Users should never call them. -const _chown = Symbol('_chown') const _extract = Symbol('_extract') const _mkdir = Symbol('_mkdir') const _empty = Symbol('_empty') @@ -359,44 +350,21 @@ class FetcherBase { return cacache.rm.content(this.cache, this.integrity, this.opts) } - async [_chown] (path, uid, gid) { - return selfOwner && (selfOwner.gid !== gid || selfOwner.uid !== uid) - ? chownr(path, uid, gid) - : /* istanbul ignore next - we don't test in root-owned folders */ null - } - [_empty] (path) { return getContents({ path, depth: 1 }).then(contents => Promise.all( - contents.map(entry => rimraf(entry)))) + contents.map(entry => fs.rm(entry, { recursive: true, force: true })))) } - [_mkdir] (dest) { - // if we're bothering to do owner inference, then do it. - // otherwise just make the dir, and return an empty object. - // always empty the dir dir to start with, but do so - // _after_ inferring the owner, in case there's an existing folder - // there that we would want to preserve which differs from the - // parent folder (rare, but probably happens sometimes). - return !inferOwner - ? this[_empty](dest).then(() => mkdirp(dest)).then(() => ({})) - : inferOwner(dest).then(({ uid, gid }) => - this[_empty](dest) - .then(() => mkdirp(dest)) - .then(made => { - // ignore the || dest part in coverage. It's there to handle - // race conditions where the dir may be made by someone else - // after being removed by us. - const dir = made || /* istanbul ignore next */ dest - return this[_chown](dir, uid, gid) - }) - .then(() => ({ uid, gid }))) + async [_mkdir] (dest) { + await this[_empty](dest) + return await fs.mkdir(dest, { recursive: true }) } // extraction is always the same. the only difference is where // the tarball comes from. 
- extract (dest) { - return this[_mkdir](dest).then(({ uid, gid }) => - this.tarballStream(tarball => this[_extract](dest, tarball, uid, gid))) + async extract (dest) { + await this[_mkdir](dest) + return this.tarballStream((tarball) => this[_extract](dest, tarball)) } [_toFile] (dest) { @@ -414,18 +382,14 @@ class FetcherBase { } // don't use this[_mkdir] because we don't want to rimraf anything - tarballFile (dest) { + async tarballFile (dest) { const dir = dirname(dest) - return !inferOwner - ? mkdirp(dir).then(() => this[_toFile](dest)) - : inferOwner(dest).then(({ uid, gid }) => - mkdirp(dir).then(made => this[_toFile](dest) - .then(res => this[_chown](made || dir, uid, gid) - .then(() => res)))) + await fs.mkdir(dir, { recursive: true }) + return this[_toFile](dest) } - [_extract] (dest, tarball, uid, gid) { - const extractor = tar.x(this[_tarxOptions]({ cwd: dest, uid, gid })) + [_extract] (dest, tarball) { + const extractor = tar.x(this[_tarxOptions]({ cwd: dest })) const p = new Promise((resolve, reject) => { extractor.on('end', () => { resolve({ diff --git a/package.json b/package.json index 1cb2a421..2bbe2f53 100644 --- a/package.json +++ b/package.json @@ -48,12 +48,9 @@ "@npmcli/installed-package-contents": "^1.0.7", "@npmcli/promise-spawn": "^3.0.0", "@npmcli/run-script": "^4.1.0", - "cacache": "^16.0.0", - "chownr": "^2.0.0", + "cacache": "^17.0.0", "fs-minipass": "^2.1.0", - "infer-owner": "^1.0.4", "minipass": "^3.1.6", - "mkdirp": "^1.0.4", "npm-package-arg": "^9.0.0", "npm-packlist": "^7.0.0", "npm-pick-manifest": "^7.0.0", @@ -62,7 +59,6 @@ "promise-retry": "^2.0.1", "read-package-json": "^5.0.0", "read-package-json-fast": "^3.0.0", - "rimraf": "^3.0.2", "ssri": "^9.0.0", "tar": "^6.1.11" }, diff --git a/test/fetcher.js b/test/fetcher.js index d2e30571..2cf8f403 100644 --- a/test/fetcher.js +++ b/test/fetcher.js @@ -1,16 +1,4 @@ -const fakeSudo = process.argv[2] === 'fake-sudo' const fs = require('fs') -process.chownLog = [] -if (fakeSudo) { - process.realUid = process.getuid() - process.getuid = () => 0 - const fakeChown = type => (path, uid, gid, cb) => { - process.chownLog.push({ type, path, uid, gid }) - process.nextTick(cb) - } - fs.chown = fakeChown('chown') - fs.lchown = fakeChown('lchown') -} fs.utimes = () => { throw new Error('do not call utimes') @@ -106,10 +94,6 @@ t.test('tarball data', async t => { t.test('tarballFile', t => { const target = resolve(me, 'tarball-file') - if (fakeSudo) { - process.chownLog.length = 0 - } - t.test('basic copy', t => new FileFetcher(abbrevspec, { cache }) .tarballFile(target + '/basic/1.tgz')) @@ -198,10 +182,6 @@ t.test('extract', t => { }) } - if (fakeSudo) { - process.chownLog.length = 0 - } - return new FileFetcher(abbrevspec, { cache }).extract(target + '/uncached') .then(check('uncached')) .then(() => new FileFetcher(abbrevspec, { @@ -210,12 +190,6 @@ t.test('extract', t => { resolved: abbrev, }).extract(target + '/cached')) .then(check('cached')) - .then(!fakeSudo ? 
() => {} : () => { - t.not(process.chownLog.length, 0, 'did some chowns') - const log = { uid: process.realUid, gid: process.getgid() } - process.chownLog.forEach(entry => t.match(entry, log, 'chowned to me')) - process.chownLog.length = 0 - }) .then(() => { // test a bad cache entry cacache.get.stream.byDigest = () => { @@ -413,7 +387,6 @@ t.test('a non-retriable cache error', t => { const poop = new Error('poop') poop.code = 'LE_POOP' t.teardown(mutateFS.fail('read', poop)) - t.teardown(() => process.chownLog.length = 0) return cacache.put(cache, 'any-old-key', data, { integrity: abbrevMani._integrity, }).then(() => t.rejects(new FileFetcher(abbrev, { @@ -422,70 +395,65 @@ t.test('a non-retriable cache error', t => { }).manifest(), poop)) }) -// no need to do some of these basic tests in sudo mode -if (!fakeSudo) { - t.spawn(process.execPath, [__filename, 'fake-sudo'], 'fake sudo mode') +t.test('before implies full metadata', t => { + const f = new Fetcher('foo', { before: new Date('1979-07-01') }) + t.equal(f.fullMetadata, true) + t.end() +}) - t.test('before implies full metadata', t => { - const f = new Fetcher('foo', { before: new Date('1979-07-01') }) - t.equal(f.fullMetadata, true) - t.end() +t.test('various projectiles', t => { + t.throws(() => new Fetcher(), { message: 'options object is required' }) + const f = new Fetcher('foo', {}) + // base class doesn't implement functionality + const expect = { + message: 'not implemented in this fetcher type: FetcherBase', + } + t.rejects(f.resolve(), expect) + f.resolved = 'fooblz' + t.resolveMatch(f.resolve(), 'fooblz', 'has resolved') + t.rejects(f.extract('target'), expect) + t.rejects(f.manifest(), expect) + t.rejects(f.packument(), expect) + t.rejects(f.tarball(), expect) + const foo = npa('foo@bar') + foo.type = 'blerg' + t.throws(() => Fetcher.get(foo, {}), { + message: 'Unknown spec type: blerg', }) - t.test('various projectiles', t => { - t.throws(() => new Fetcher(), { message: 'options object is required' }) - const f = new Fetcher('foo', {}) - // base class doesn't implement functionality - const expect = { - message: 'not implemented in this fetcher type: FetcherBase', - } - t.rejects(f.resolve(), expect) - f.resolved = 'fooblz' - t.resolveMatch(f.resolve(), 'fooblz', 'has resolved') - t.rejects(f.extract('target'), expect) - t.rejects(f.manifest(), expect) - t.rejects(f.packument(), expect) - t.rejects(f.tarball(), expect) - const foo = npa('foo@bar') - foo.type = 'blerg' - t.throws(() => Fetcher.get(foo, {}), { - message: 'Unknown spec type: blerg', - }) - - class KidFetcher extends Fetcher { - get types () { - return ['kid'] - } + class KidFetcher extends Fetcher { + get types () { + return ['kid'] } - t.throws(() => new KidFetcher('foo', {}), { - message: `Wrong spec type (tag) for KidFetcher. Supported types: kid`, - }) - t.end() + } + t.throws(() => new KidFetcher('foo', {}), { + message: `Wrong spec type (tag) for KidFetcher. 
Supported types: kid`, }) + t.end() +}) - t.test('fetcher.get', t => { - const specToType = { - foo: 'RegistryFetcher', - 'foo@bar': 'RegistryFetcher', - 'foo@1.2': 'RegistryFetcher', - 'foo@1.2.3': 'RegistryFetcher', - 'npm:foo@2': 'RegistryFetcher', - '@foo/bar': 'RegistryFetcher', - '@foo/bar@1.2': 'RegistryFetcher', - '@foo/bar@1.2.3': 'RegistryFetcher', - 'foo.tgz': 'FileFetcher', - '/path/to/foo': 'DirFetcher', - 'isaacs/foo': 'GitFetcher', - 'git+https://github.com/isaacs/foo': 'GitFetcher', - 'https://server.com/foo.tgz': 'RemoteFetcher', - } - for (const [spec, type] of Object.entries(specToType)) { - t.equal(Fetcher.get(spec).type, type) - } +t.test('fetcher.get', t => { + const specToType = { + foo: 'RegistryFetcher', + 'foo@bar': 'RegistryFetcher', + 'foo@1.2': 'RegistryFetcher', + 'foo@1.2.3': 'RegistryFetcher', + 'npm:foo@2': 'RegistryFetcher', + '@foo/bar': 'RegistryFetcher', + '@foo/bar@1.2': 'RegistryFetcher', + '@foo/bar@1.2.3': 'RegistryFetcher', + 'foo.tgz': 'FileFetcher', + '/path/to/foo': 'DirFetcher', + 'isaacs/foo': 'GitFetcher', + 'git+https://github.com/isaacs/foo': 'GitFetcher', + 'https://server.com/foo.tgz': 'RemoteFetcher', + } + for (const [spec, type] of Object.entries(specToType)) { + t.equal(Fetcher.get(spec).type, type) + } - t.end() - }) -} + t.end() +}) t.test('make bins executable', async t => { const file = resolve(__dirname, 'fixtures/bin-object.tgz') diff --git a/test/git.js b/test/git.js index f0e4bf18..2048f0a2 100644 --- a/test/git.js +++ b/test/git.js @@ -61,7 +61,6 @@ const fs = require('fs') const http = require('http') const { dirname, basename, resolve } = require('path') -const rimraf = require('rimraf') const fixtures = resolve(__dirname, 'fixtures') const abbrev = resolve(fixtures, 'abbrev-1.1.1.tgz') const prepIgnore = resolve(fixtures, 'prepare-requires-gitignore-1.2.3.tgz') @@ -84,7 +83,8 @@ const spawnGit = require('@npmcli/git').spawn const { spawn } = require('child_process') const spawnNpm = require('../lib/util/npm.js') -const mkdirp = require('mkdirp') +const { mkdir } = require('fs/promises') +const { rmSync } = require('fs') const tar = require('tar') @@ -99,8 +99,8 @@ t.test('setup', { bail: true }, t => { '--no-sign-git-tag', ], repo) - mkdirp.sync(repo) - return git('init') + return mkdir(repo, { recursive: true }) + .then(() => git('init')) .then(() => git('config', 'user.name', 'pacotedev')) .then(() => git('config', 'user.email', 'i+pacotedev@izs.me')) .then(() => git('config', 'tag.gpgSign', 'false')) @@ -180,7 +180,7 @@ t.test('setup', { bail: true }, t => { const name = basename(repoDir) const otherName = basename(other) - mkdirp.sync(repoDir) + await mkdir(repoDir, { recursive: true }) await git('init') await git('config', 'user.name', 'pacotedev') await git('config', 'user.email', 'i+pacotedev@izs.me') @@ -227,15 +227,15 @@ t.test('setup', { bail: true }, t => { } daemon.stderr.on('data', onDaemonData) // only clean up the dir once the daemon is banished - daemon.on('close', () => rimraf.sync(me)) + daemon.on('close', () => rmSync(me, { recursive: true, force: true })) }) t.test('create a repo with a submodule', t => { const submoduleRepo = resolve(me, 'submodule-repo') const git = (...cmd) => spawnGit(cmd, { cwd: submoduleRepo }) const write = (f, c) => fs.writeFileSync(`${submoduleRepo}/${f}`, c) - mkdirp.sync(submoduleRepo) - return git('init') + return mkdir(submoduleRepo, { recursive: true }) + .then(() => git('init')) .then(() => git('config', 'user.name', 'pacotedev')) .then(() => git('config', 'user.email', 
'i+pacotedev@izs.me')) .then(() => git('config', 'tag.gpgSign', 'false')) @@ -293,8 +293,8 @@ t.test('setup', { bail: true }, t => { const wsfolder = resolve(me, 'workspaces-repo/a') const git = (...cmd) => spawnGit(cmd, { cwd: workspacesRepo }) const write = (f, c) => fs.writeFileSync(`${workspacesRepo}/${f}`, c) - mkdirp.sync(wsfolder) - return git('init') + return mkdir(wsfolder, { recursive: true }) + .then(() => git('init')) .then(() => git('config', 'user.name', 'pacotedev')) .then(() => git('config', 'user.email', 'i+pacotedev@github.com')) .then(() => git('config', 'tag.gpgSign', 'false')) @@ -322,8 +322,8 @@ t.test('setup', { bail: true }, t => { const prepackRepo = resolve(me, 'prepack-repo') const git = (...cmd) => spawnGit(cmd, { cwd: prepackRepo }) const write = (f, c) => fs.writeFileSync(`${prepackRepo}/${f}`, c) - mkdirp.sync(prepackRepo) - return git('init') + return mkdir(prepackRepo, { recursive: true }) + .then(() => git('init')) .then(() => git('config', 'user.name', 'pacotedev')) .then(() => git('config', 'user.email', 'i+pacotedev@github.com')) .then(() => git('config', 'tag.gpgSign', 'false'))
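
Note (not part of the patch above): because pacote no longer chowns extracted files when running as root, a consumer that relied on that behavior now has to adjust ownership itself after extraction. Below is a minimal sketch, assuming the public `pacote.extract()` API and Node's `fs/promises`; the helper names `chownRecursive` and `extractAs` are illustrative only, not part of pacote.

const pacote = require('pacote')
const fs = require('fs/promises')
const path = require('path')

// walk the extracted tree and chown every entry
// (lchown so symlink targets are not followed)
const chownRecursive = async (dir, uid, gid) => {
  await fs.lchown(dir, uid, gid)
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const p = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      await chownRecursive(p, uid, gid)
    } else {
      await fs.lchown(p, uid, gid)
    }
  }
}

// hypothetical wrapper: extract a package, then hand ownership to uid/gid
const extractAs = async (spec, dest, uid, gid) => {
  const result = await pacote.extract(spec, dest)
  // chown is only meaningful (and permitted) when running as root
  if (process.getuid && process.getuid() === 0) {
    await chownRecursive(dest, uid, gid)
  }
  return result
}

This roughly mirrors what the removed chownr / infer-owner logic in [_mkdir], [_chown] and [_extract] used to do, but leaves the policy decision (which uid/gid to apply) to the caller instead of inferring it from the destination's parent directory.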